[ 545.308495] env[62816]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62816) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 545.308820] env[62816]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62816) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 545.308939] env[62816]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62816) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 545.309267] env[62816]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 545.402833] env[62816]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62816) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 545.413909] env[62816]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=62816) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 546.013992] env[62816]: INFO nova.virt.driver [None req-56daa60d-420e-4dbb-8940-1e5e53a14d5b None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 546.083950] env[62816]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 546.084138] env[62816]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 546.084237] env[62816]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62816) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 549.259425] env[62816]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-4697af94-13ae-4afd-a4e5-4608e888ab15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.276385] env[62816]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62816) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 549.276609] env[62816]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-427cd5e7-f426-4dd9-9dd0-1a13d9d378b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.310641] env[62816]: INFO oslo_vmware.api [-] Successfully established new session; session ID is e7e76.
[ 549.310885] env[62816]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.227s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 549.311379] env[62816]: INFO nova.virt.vmwareapi.driver [None req-56daa60d-420e-4dbb-8940-1e5e53a14d5b None None] VMware vCenter version: 7.0.3
[ 549.314823] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc3de1c-e9ba-4bdd-9e7b-edf9fff5f924 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.332393] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5272bfe6-3675-453e-b531-925295bfa892 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.338306] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4010df6d-3b31-40d2-8f6b-dfd1b0d59e4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.344922] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e014c28e-2303-4d2c-8871-6ccd5584c30e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.357905] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44aa9dc-6b58-497f-a556-c8d743e1cae9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.363608] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b740efc-5f8b-4a3f-bc28-081f08e8a336 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.394040] env[62816]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-fc4d9131-44b2-4019-aa49-27c1fd42aaf0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 549.398654] env[62816]: DEBUG nova.virt.vmwareapi.driver [None req-56daa60d-420e-4dbb-8940-1e5e53a14d5b None None] Extension org.openstack.compute already exists. {{(pid=62816) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 549.401287] env[62816]: INFO nova.compute.provider_config [None req-56daa60d-420e-4dbb-8940-1e5e53a14d5b None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 549.904386] env[62816]: DEBUG nova.context [None req-56daa60d-420e-4dbb-8940-1e5e53a14d5b None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),18798693-7c9b-4a04-8e39-522aceb095b2(cell1) {{(pid=62816) load_cells /opt/stack/nova/nova/context.py:464}}
[ 549.906824] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 549.907344] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 549.908043] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 549.908501] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Acquiring lock "18798693-7c9b-4a04-8e39-522aceb095b2" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 549.908697] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Lock "18798693-7c9b-4a04-8e39-522aceb095b2" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 549.909727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Lock "18798693-7c9b-4a04-8e39-522aceb095b2" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 549.930436] env[62816]: INFO dbcounter [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Registered counter for database nova_cell0
[ 549.939105] env[62816]: INFO dbcounter [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Registered counter for database nova_cell1
[ 549.942359] env[62816]: DEBUG oslo_db.sqlalchemy.engines [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62816) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 549.942717] env[62816]: DEBUG oslo_db.sqlalchemy.engines [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62816) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 549.947702] env[62816]: ERROR nova.db.main.api [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 549.947702] env[62816]: result = function(*args, **kwargs)
[ 549.947702] env[62816]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 549.947702] env[62816]: return func(*args, **kwargs)
[ 549.947702] env[62816]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 549.947702] env[62816]: result = fn(*args, **kwargs)
[ 549.947702] env[62816]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 549.947702] env[62816]: return f(*args, **kwargs)
[ 549.947702] env[62816]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 549.947702] env[62816]: return db.service_get_minimum_version(context, binaries)
[ 549.947702] env[62816]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 549.947702] env[62816]: _check_db_access()
[ 549.947702] env[62816]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 549.947702] env[62816]: stacktrace = ''.join(traceback.format_stack())
[ 549.947702] env[62816]:
[ 549.948841] env[62816]: ERROR nova.db.main.api [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 549.948841] env[62816]: result = function(*args, **kwargs)
[ 549.948841] env[62816]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 549.948841] env[62816]: return func(*args, **kwargs)
[ 549.948841] env[62816]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 549.948841] env[62816]: result = fn(*args, **kwargs)
[ 549.948841] env[62816]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 549.948841] env[62816]: return f(*args, **kwargs)
[ 549.948841] env[62816]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 549.948841] env[62816]: return db.service_get_minimum_version(context, binaries)
[ 549.948841] env[62816]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 549.948841] env[62816]: _check_db_access()
[ 549.948841] env[62816]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 549.948841] env[62816]: stacktrace = ''.join(traceback.format_stack())
[ 549.948841] env[62816]:
[ 549.949388] env[62816]: WARNING nova.objects.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 549.949388] env[62816]: WARNING nova.objects.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Failed to get minimum service version for cell 18798693-7c9b-4a04-8e39-522aceb095b2
[ 549.949819] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Acquiring lock "singleton_lock" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 549.949980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Acquired lock "singleton_lock" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
549.950233] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Releasing lock "singleton_lock" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.950562] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Full set of CONF: {{(pid=62816) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 549.950709] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ******************************************************************************** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 549.950839] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Configuration options gathered from: {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 549.950980] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 549.951185] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 549.951316] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ================================================================================ {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 549.951526] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] allow_resize_to_same_host = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.951697] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] arq_binding_timeout = 300 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.951830] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] backdoor_port = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.951959] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] backdoor_socket = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.952138] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] block_device_allocate_retries = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.952304] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] block_device_allocate_retries_interval = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.952473] env[62816]: DEBUG 
oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cert = self.pem {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.952646] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.952810] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute_monitors = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.952979] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] config_dir = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.953163] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] config_drive_format = iso9660 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.953298] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.953463] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] config_source = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.953629] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] console_host = devstack {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.953793] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] control_exchange = nova {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.953954] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cpu_allocation_ratio = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.954125] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] daemon = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.954295] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] debug = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.954451] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] default_access_ip_network_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.954616] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] default_availability_zone = nova {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.954792] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] default_ephemeral_format = 
None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.954963] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] default_green_pool_size = 1000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.955223] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.955392] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] default_schedule_zone = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.955552] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] disk_allocation_ratio = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.955724] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] enable_new_services = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.955936] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] enabled_apis = ['osapi_compute'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.956135] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] enabled_ssl_apis = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.956298] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] flat_injected = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.956457] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] force_config_drive = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.956615] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] force_raw_images = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.956783] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] graceful_shutdown_timeout = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.956944] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] heal_instance_info_cache_interval = 60 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.957178] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] host = cpu-1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.957353] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.957519] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] initial_disk_allocation_ratio = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.957680] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] initial_ram_allocation_ratio = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.957892] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.958069] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_build_timeout = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.958233] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_delete_interval = 300 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.958401] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_format = [instance: %(uuid)s] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.958568] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_name_template = instance-%08x {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.958735] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_usage_audit = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.958900] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_usage_audit_period = month {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.959078] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.959247] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] instances_path = /opt/stack/data/nova/instances {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.959411] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] internal_service_availability_zone = internal {{(pid=62816) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.959566] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] key = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.959724] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] live_migration_retry_count = 30 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.959897] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_color = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.960077] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_config_append = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.960248] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.960409] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_dir = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.960566] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.960694] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_options = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.960854] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_rotate_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961032] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_rotate_interval_type = days {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961204] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] log_rotation_type = none {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961334] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961458] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961624] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961786] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.961913] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.962085] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] long_rpc_timeout = 1800 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.962248] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] max_concurrent_builds = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.962408] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] max_concurrent_live_migrations = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.962570] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] max_concurrent_snapshots = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.962732] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] max_local_block_devices = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.962891] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] max_logfile_count = 30 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.963060] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] max_logfile_size_mb = 200 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.963224] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] maximum_instance_delete_attempts = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.963389] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metadata_listen = 0.0.0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.963560] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metadata_listen_port = 8775 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.963729] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metadata_workers = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.963892] env[62816]: DEBUG oslo_service.service 
[None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] migrate_max_retries = -1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.964072] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] mkisofs_cmd = genisoimage {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.964279] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] my_block_storage_ip = 10.180.1.21 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.964411] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] my_ip = 10.180.1.21 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.964572] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] network_allocate_retries = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.964787] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.964958] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] osapi_compute_listen = 0.0.0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.965141] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] osapi_compute_listen_port = 8774 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.965313] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] osapi_compute_unique_server_name_scope = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.965483] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] osapi_compute_workers = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.965650] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] password_length = 12 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.965866] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] periodic_enable = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.966069] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] periodic_fuzzy_delay = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.966251] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] pointer_model = usbtablet {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.966422] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] preallocate_images = none {{(pid=62816) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.966585] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] publish_errors = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.966719] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] pybasedir = /opt/stack/nova {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.966880] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ram_allocation_ratio = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.967054] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] rate_limit_burst = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.967222] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] rate_limit_except_level = CRITICAL {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.967380] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] rate_limit_interval = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.967538] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] reboot_timeout = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.967694] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] reclaim_instance_interval = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.967859] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] record = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.968034] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] reimage_timeout_per_gb = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.968206] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] report_interval = 120 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.968366] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] rescue_timeout = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.968528] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] reserved_host_cpus = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.968687] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] reserved_host_disk_mb = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.968875] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c 
None None] reserved_host_memory_mb = 512 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.969075] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] reserved_huge_pages = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.969242] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] resize_confirm_window = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.969404] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] resize_fs_using_block_device = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.969563] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] resume_guests_state_on_host_boot = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.969731] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.969897] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] rpc_response_timeout = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.970068] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] run_external_periodic_tasks = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.970241] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] running_deleted_instance_action = reap {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.970403] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] running_deleted_instance_poll_interval = 1800 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.970565] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] running_deleted_instance_timeout = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.970725] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler_instance_sync_interval = 120 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.970902] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_down_time = 720 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.971080] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] servicegroup_driver = db {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.971245] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] shell_completion = None {{(pid=62816) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.971408] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] shelved_offload_time = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.971570] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] shelved_poll_interval = 3600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.971742] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] shutdown_timeout = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.971932] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] source_is_ipv6 = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.972120] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ssl_only = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.972373] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.972546] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] sync_power_state_interval = 600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.972710] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] sync_power_state_pool_size = 1000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.972879] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] syslog_log_facility = LOG_USER {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.973051] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] tempdir = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.973215] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] timeout_nbd = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.973382] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] transport_url = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.973543] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] update_resources_interval = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.973704] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_cow_images = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.973870] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_eventlog = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.974039] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_journal = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.974202] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_json = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.974362] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_rootwrap_daemon = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.974518] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_stderr = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.974676] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] use_syslog = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.974873] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vcpu_pin_set = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.975077] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plugging_is_fatal = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.975255] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plugging_timeout = 300 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.975422] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] virt_mkfs = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.975587] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] volume_usage_poll_interval = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.975752] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] watch_log_file = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.975946] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] web = /usr/share/spice-html5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 549.976155] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.976328] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.976496] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.976669] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_concurrency.disable_process_locking = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.977227] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.977429] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.977605] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.977782] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.977984] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.978186] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.978376] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.auth_strategy = keystone {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.978549] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.compute_link_prefix = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.978729] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979036] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.dhcp_domain = novalocal {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979091] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.enable_instance_password = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979255] 
env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.glance_link_prefix = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979424] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979596] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979758] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.instance_list_per_project_cells = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.979924] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.list_records_by_skipping_down_cells = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.980101] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.local_metadata_per_cell = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.980278] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.max_limit = 1000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.980449] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.metadata_cache_expiration = 15 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.980623] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.neutron_default_tenant_id = default {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.980793] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.response_validation = warn {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.981009] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.use_neutron_default_nets = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.981198] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.981366] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.981536] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.981710] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.981883] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_dynamic_targets = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.982062] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_jsonfile_path = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.982248] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.982444] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.backend = dogpile.cache.memcached {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.982614] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.backend_argument = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.982775] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.backend_expiration_time = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.982948] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.config_prefix = cache.oslo {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.983134] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.dead_timeout = 60.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.983303] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.debug_cache_backend = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.983467] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.enable_retry_client = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.983632] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.enable_socket_keepalive = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.983801] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.enabled = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.983994] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.enforce_fips_mode = False {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.984184] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.expiration_time = 600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.984353] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.hashclient_retry_attempts = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.984522] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.hashclient_retry_delay = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.984689] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_dead_retry = 300 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.984883] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_password = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.985069] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.985242] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.985408] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_pool_maxsize = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.985573] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.985736] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_sasl_enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.985938] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.986131] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_socket_timeout = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.986300] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.memcache_username = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.986471] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.proxies = [] {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.986638] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_db = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.986802] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_password = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.987014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_sentinel_service_name = mymaster {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.987211] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.987387] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_server = localhost:6379 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.987554] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_socket_timeout = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.987718] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.redis_username = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.987886] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.retry_attempts = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.988065] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.retry_delay = 0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.988238] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.socket_keepalive_count = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.988404] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.socket_keepalive_idle = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.988571] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.socket_keepalive_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.988733] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.tls_allowed_ciphers = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.988898] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.tls_cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.989073] 
env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.tls_certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.989246] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.tls_enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.989411] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cache.tls_keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.989585] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.989762] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.auth_type = password {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.989946] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.990154] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.catalog_info = volumev3::publicURL {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.990323] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.990492] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.990658] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.cross_az_attach = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.990823] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.debug = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.990991] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.endpoint_template = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.991175] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.http_retries = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.991344] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.991505] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.keyfile = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.991681] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.os_region_name = RegionOne {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.991850] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.992028] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cinder.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.992208] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.992374] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.cpu_dedicated_set = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.992537] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.cpu_shared_set = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.992707] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.image_type_exclude_list = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.992904] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.993090] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.max_concurrent_disk_ops = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.993261] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.max_disk_devices_to_attach = -1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.993429] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.993601] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.993768] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.resource_provider_association_refresh = 300 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.993936] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.994114] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.shutdown_retry_interval = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.994298] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.994477] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] conductor.workers = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.994654] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] console.allowed_origins = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.994845] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] console.ssl_ciphers = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.995042] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] console.ssl_minimum_version = default {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.995223] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] consoleauth.enforce_session_timeout = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.995396] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] consoleauth.token_ttl = 600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.995571] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.995732] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.995948] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.996139] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.996307] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.996473] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.endpoint_override = None 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.996640] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.996801] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.996966] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.997143] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.997306] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.997468] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.997632] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.997805] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.service_type = accelerator {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.997973] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.998150] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.998315] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.998474] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.998656] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.998843] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] cyborg.version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
549.999057] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.backend = sqlalchemy {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.999238] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.connection = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.999407] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.connection_debug = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.999578] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.connection_parameters = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.999745] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.connection_recycle_time = 3600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 549.999910] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.connection_trace = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.000085] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.db_inc_retry_interval = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.000255] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.db_max_retries = 20 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.000420] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.db_max_retry_interval = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.000583] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.db_retry_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.000745] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.max_overflow = 50 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.000910] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.max_pool_size = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.001088] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.max_retries = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.001264] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.001426] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.mysql_wsrep_sync_wait = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.001587] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.pool_timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.001752] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.retry_interval = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.001939] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.slave_connection = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.002127] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.sqlite_synchronous = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.002295] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] database.use_db_reconnect = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.002478] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.backend = sqlalchemy {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.002648] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.connection = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.002816] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.connection_debug = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.002988] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.connection_parameters = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.003170] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.connection_recycle_time = 3600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.003337] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.connection_trace = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.003502] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.db_inc_retry_interval = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.003666] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.db_max_retries = 20 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.003830] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.db_max_retry_interval = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.003993] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.db_retry_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.004174] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.max_overflow = 50 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.004339] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.max_pool_size = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.004502] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.max_retries = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.004673] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.004870] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.005062] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.pool_timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.005239] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.retry_interval = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.005404] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.slave_connection = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.005573] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] api_database.sqlite_synchronous = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.005755] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] devices.enabled_mdev_types = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.005957] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.006152] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ephemeral_storage_encryption.default_format = luks {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.006321] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ephemeral_storage_encryption.enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.006487] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.006659] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.api_servers = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.006824] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.006991] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.007172] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.007336] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.007496] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.007660] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.debug = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.007849] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.default_trusted_certificate_ids = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.008045] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.enable_certificate_validation = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.008221] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.enable_rbd_download = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.008386] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.endpoint_override = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.008559] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.008726] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.keyfile = None 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.008892] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.009064] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.009233] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.num_retries = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.009403] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.rbd_ceph_conf = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.009569] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.rbd_connect_timeout = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.009738] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.rbd_pool = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.009911] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.rbd_user = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.010079] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.010245] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.010407] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.010577] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.service_type = image {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.010750] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.010941] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.011124] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.011289] env[62816]: DEBUG 
oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.011473] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.011639] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.verify_glance_signatures = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.011801] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] glance.version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.011973] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] guestfs.debug = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.012158] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] mks.enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.012502] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.012696] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] image_cache.manager_interval = 2400 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.012870] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] image_cache.precache_concurrency = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.013060] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] image_cache.remove_unused_base_images = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.013240] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.013413] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.013594] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] image_cache.subdirectory_name = _base {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.013772] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.api_max_retries = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.013966] env[62816]: DEBUG 
oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.api_retry_interval = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.014151] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.014322] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.auth_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.014485] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.014647] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.014848] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.015049] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.conductor_group = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.015221] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.015387] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.015551] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.endpoint_override = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.015721] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.015901] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.016089] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.016260] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.016432] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.peer_list = [] {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.016596] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.016759] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.016962] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.serial_console_state_timeout = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.017145] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.017324] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.service_type = baremetal {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.017489] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.shard = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.017656] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.017817] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.017982] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.018222] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.018415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.018586] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ironic.version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.018771] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.018948] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] key_manager.fixed_key = **** {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.019149] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.019317] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.barbican_api_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.019479] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.barbican_endpoint = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.019651] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.barbican_endpoint_type = public {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.019818] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.barbican_region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.020010] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.020184] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.020356] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.020522] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.020684] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.020850] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.number_of_retries = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.021026] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.retry_delay = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.021199] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.send_service_user_token = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.021366] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.split_loggers = False {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.021528] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.021693] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.verify_ssl = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.021853] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican.verify_ssl_path = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.022034] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.022203] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.auth_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.022364] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.022521] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.022687] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.022877] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.023057] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.023228] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.023390] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] barbican_service_user.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.023559] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.approle_role_id = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.023718] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.approle_secret_id = **** {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.023889] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.kv_mountpoint = secret {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.024063] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.kv_path = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.024233] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.kv_version = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.024394] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.namespace = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.024554] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.root_token_id = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.024722] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.ssl_ca_crt_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.024915] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.timeout = 60.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.025098] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.use_ssl = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.025275] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.025451] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.025616] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.025803] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.026012] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.026192] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.026356] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.endpoint_override = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.026520] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.026681] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.026841] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027011] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027177] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027337] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027496] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027666] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.service_type = identity {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027828] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.027992] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.028167] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.028326] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.028508] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.028670] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] keystone.version = None 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.028896] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.connection_uri = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.029088] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_mode = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.029263] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_model_extra_flags = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.029435] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_models = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.029610] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_power_governor_high = performance {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.029783] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_power_governor_low = powersave {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.029951] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_power_management = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.030139] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.030309] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.device_detach_attempts = 8 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.030477] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.device_detach_timeout = 20 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.030645] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.disk_cachemodes = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.030808] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.disk_prefix = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.030978] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.enabled_perf_events = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.031164] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.file_backed_memory = 0 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.031396] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.gid_maps = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.031571] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.hw_disk_discard = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.031735] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.hw_machine_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.031913] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_rbd_ceph_conf = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.032099] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.032272] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.032444] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_rbd_glance_store_name = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.032617] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_rbd_pool = rbd {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.032788] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_type = default {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.032953] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.images_volume_group = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.033130] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.inject_key = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.033295] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.inject_partition = -2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.033457] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.inject_password = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.033622] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.iscsi_iface = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.033786] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.iser_use_multipath = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.033955] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_bandwidth = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.034137] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.034331] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_downtime = 500 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.034506] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.034672] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.034862] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_inbound_addr = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.035054] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.035224] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_permit_post_copy = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.035389] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_scheme = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.035566] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_timeout_action = abort {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.035843] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_tunnelled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.036037] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.live_migration_uri = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.036211] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] 
libvirt.live_migration_with_native_tls = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.036376] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.max_queues = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.036544] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.036773] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.036943] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.nfs_mount_options = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.037276] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.037468] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.037640] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.num_iser_scan_tries = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.037806] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.num_memory_encrypted_guests = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.037976] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.038159] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.num_pcie_ports = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.038329] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.num_volume_scan_tries = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.038499] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.pmem_namespaces = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.038661] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.quobyte_client_cfg = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.038957] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.039157] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rbd_connect_timeout = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.039327] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.039492] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.039650] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rbd_secret_uuid = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.039807] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rbd_user = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.039970] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.040155] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.remote_filesystem_transport = ssh {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.040341] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rescue_image_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.040514] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rescue_kernel_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.040675] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rescue_ramdisk_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.040845] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.041029] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.rx_queue_size = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.041210] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.smbfs_mount_options = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.041503] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.041681] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.snapshot_compression = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.041845] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.snapshot_image_format = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.042090] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.042263] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.sparse_logical_volumes = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.042432] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.swtpm_enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.042603] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.swtpm_group = tss {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.042791] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.swtpm_user = tss {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.042980] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.sysinfo_serial = unique {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.043163] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.tb_cache_size = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.043326] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.tx_queue_size = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.043493] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.uid_maps = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.043659] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.use_virtio_for_bridges = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.043830] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.virt_type = kvm {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.044007] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.volume_clear = zero 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.044180] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.volume_clear_size = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.044349] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.volume_use_multipath = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.044510] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_cache_path = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.044678] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.044875] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_mount_group = qemu {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.045061] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_mount_opts = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.045236] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.045509] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.045687] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.vzstorage_mount_user = stack {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.045897] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.046113] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.046299] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.auth_type = password {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.046464] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.046624] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.certfile = None 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.046787] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.046953] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.047144] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.047313] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.default_floating_pool = public {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.047476] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.endpoint_override = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.047640] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.extension_sync_interval = 600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.047802] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.http_retries = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.047966] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.048142] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.048303] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.048473] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.048634] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.048821] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.ovs_bridge = br-int {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.049017] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.physnets = [] {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.049194] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.region_name = RegionOne {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.049355] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.049524] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.service_metadata_proxy = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.049684] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.049852] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.service_type = network {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.050026] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.050193] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.050353] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.050511] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.050702] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.050868] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] neutron.version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.051052] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] notifications.bdms_in_notifications = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.051233] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] notifications.default_level = INFO {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.051405] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] notifications.notification_format = unversioned {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.051567] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] notifications.notify_on_state_change = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.051741] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.051942] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] pci.alias = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.052134] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] pci.device_spec = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.052305] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] pci.report_in_placement = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.052477] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.052651] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.auth_type = password {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.052819] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.052979] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.053152] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.053316] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.053477] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.053635] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.053791] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.default_domain_id = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.053951] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.default_domain_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.054121] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.domain_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.054280] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.domain_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.054437] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.endpoint_override = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.054596] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.054795] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.054969] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.055152] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.055325] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.password = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.055486] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.project_domain_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.055648] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.project_domain_name = Default {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.055821] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.project_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.056038] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.project_name = service {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.056218] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.region_name = RegionOne {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.056387] 
env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.056547] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.056715] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.service_type = placement {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.056882] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.057067] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.057226] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.057385] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.system_scope = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.057546] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.057705] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.trust_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.057871] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.user_domain_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.058079] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.user_domain_name = Default {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.058245] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.user_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.058419] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.username = nova {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.058597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.058758] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] placement.version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.058943] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.cores = 20 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.059123] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.count_usage_from_placement = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.059296] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.059467] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.injected_file_content_bytes = 10240 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.059633] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.injected_file_path_length = 255 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.059800] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.injected_files = 5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.059969] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.instances = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.060152] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.key_pairs = 100 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.060323] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.metadata_items = 128 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.060493] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.ram = 51200 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.060660] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.recheck_quota = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.060830] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.server_group_members = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.061034] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] quota.server_groups = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.061225] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.061391] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.061554] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.image_metadata_prefilter = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.061717] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.061885] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.max_attempts = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.062070] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.max_placement_results = 1000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.062239] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.062404] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.query_placement_for_image_type_support = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.062566] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.062740] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] scheduler.workers = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.062915] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.063098] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.063282] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.063451] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.063621] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.063787] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.063981] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.064198] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.064381] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.host_subset_size = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.064550] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.064726] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.064903] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.065087] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.isolated_hosts = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.065255] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.isolated_images = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.065418] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.065579] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.065743] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.065929] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.pci_in_placement = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.066113] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.066277] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.066439] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.066600] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.066763] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.066962] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.067167] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.track_instance_changes = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.067359] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.067562] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metrics.required = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.067735] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metrics.weight_multiplier = 1.0 
{{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.067907] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.068089] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] metrics.weight_setting = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.068402] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.068580] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] serial_console.enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.068758] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] serial_console.port_range = 10000:20000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.068931] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.069113] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.069288] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] serial_console.serialproxy_port = 6083 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.069458] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.069634] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.auth_type = password {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.069796] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.069994] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.070198] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.070369] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.insecure = False {{(pid=62816) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.070532] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.070706] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.send_service_user_token = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.070873] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.071047] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] service_user.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.071239] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.agent_enabled = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.071406] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.071712] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.071916] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.072106] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.html5proxy_port = 6082 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.072274] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.image_compression = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.072434] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.jpeg_compression = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.072597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.playback_compression = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.072781] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.require_secure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.072979] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.server_listen = 127.0.0.1 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.073199] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.073395] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.streaming_mode = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.073586] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] spice.zlib_compression = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074584] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] upgrade_levels.baseapi = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074584] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] upgrade_levels.compute = auto {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074584] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] upgrade_levels.conductor = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074584] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] upgrade_levels.scheduler = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074584] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074584] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.auth_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074904] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.074935] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.075102] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.075310] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.insecure = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.075499] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.keyfile = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076402] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076402] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vendordata_dynamic_auth.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076402] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.api_retry_count = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076402] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.ca_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076402] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.cache_prefix = devstack-image-cache {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076573] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.cluster_name = testcl1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076716] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.connection_pool_size = 10 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.076880] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.console_delay_seconds = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.077062] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.datastore_regex = ^datastore.* {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.host_password = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.host_port = 443 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.host_username = administrator@vsphere.local {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.insecure = True {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.integration_bridge = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081014] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.maximum_objects = 100 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.pbm_default_policy = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.pbm_enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.pbm_wsdl_location = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.serial_port_proxy_uri = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.serial_port_service_uri = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.task_poll_interval = 0.5 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.use_linked_clone = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.vnc_keymap = en-us {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.vnc_port = 5900 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vmware.vnc_port_total = 10000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.auth_schemes = ['none'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] 
env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.enabled = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081415] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.novncproxy_port = 6080 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.server_listen = 127.0.0.1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.vencrypt_ca_certs = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081597] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.vencrypt_client_cert = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081789] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vnc.vencrypt_client_key = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.081911] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.082099] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.disable_deep_image_inspection = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.082268] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.082428] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.082587] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.082744] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.disable_rootwrap = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.082901] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.enable_numa_live_migration = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.083069] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.083229] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.083386] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.083542] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.libvirt_disable_apic = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.083698] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.083874] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.084045] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.084211] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.084374] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.084536] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.084698] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.084883] 
env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.085059] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.085230] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.085413] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.085582] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.client_socket_timeout = 900 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.085746] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.default_pool_size = 1000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.085939] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.keep_alive = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.086127] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.max_header_line = 16384 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.086292] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.secure_proxy_ssl_header = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.086453] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.ssl_ca_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.086612] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.ssl_cert_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.086771] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.ssl_key_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.086937] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.tcp_keepidle = 600 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.087130] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.087303] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] zvm.ca_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.087464] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] zvm.cloud_connector_url = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.087756] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.087942] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] zvm.reachable_timeout = 300 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.088141] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.enforce_new_defaults = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.088512] env[62816]: WARNING oslo_config.cfg [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
[ 550.088695] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.enforce_scope = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.088871] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.policy_default_rule = default {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.089063] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.089243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.policy_file = policy.yaml {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.089423] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.089586] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.089748] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.089911] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.090089] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.090265] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_policy.remote_timeout = 60.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.090436] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.090612] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.090791] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.connection_string = messaging:// {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.090963] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.enabled = False {{(pid=62816) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.091150] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.es_doc_type = notification {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.091317] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.es_scroll_size = 10000 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.091488] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.es_scroll_time = 2m {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.091653] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.filter_error_trace = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.091822] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.hmac_keys = **** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.091993] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.sentinel_service_name = mymaster {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.092177] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.socket_timeout = 0.1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.092345] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.trace_requests = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.092510] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler.trace_sqlalchemy = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.092694] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler_jaeger.process_tags = {} {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.092853] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler_jaeger.service_name_prefix = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.093023] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] profiler_otlp.service_name_prefix = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.093194] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] remote_debug.host = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.093356] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] remote_debug.port = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.093534] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.093697] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.093860] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.094035] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.094202] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.094363] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.094522] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.094682] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.094864] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.095048] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.095217] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.095385] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.095549] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.095716] 
env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.095903] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.096099] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.096267] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.096440] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.096603] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.096763] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.096926] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.097100] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.097263] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.097428] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.097587] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.097744] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.097905] env[62816]: DEBUG oslo_service.service [None 
req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.098075] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.098243] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.098407] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.ssl = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.098577] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.098745] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.098907] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.099085] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.099255] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.ssl_version = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.099417] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.099601] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.099769] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_notifications.retry = -1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.099955] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.100139] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_messaging_notifications.transport_url = **** {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.100312] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.auth_section = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.100477] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.auth_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.100635] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.cafile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.100791] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.certfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.100954] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.collect_timing = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.101127] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.connect_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.101285] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.connect_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.101444] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.endpoint_id = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.101610] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.endpoint_interface = publicURL {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.101766] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.endpoint_override = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.101923] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.endpoint_region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.102090] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.endpoint_service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.102251] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.endpoint_service_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.102414] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.insecure = False {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.102569] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.keyfile = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.102729] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.max_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.102886] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.min_version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.103058] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.region_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.103221] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.retriable_status_codes = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.103376] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.service_name = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.103533] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.service_type = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.103691] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.split_loggers = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.103846] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.status_code_retries = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.104008] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.status_code_retry_delay = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.104175] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.timeout = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.104332] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.valid_interfaces = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.104488] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_limit.version = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.104653] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_reports.file_event_handler = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.104841] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.105041] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] oslo_reports.log_dir = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.105233] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.105396] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.105555] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.105720] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.105887] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.106057] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.106235] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.106399] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_ovs_privileged.group = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.106558] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.106723] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.106888] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.107058] env[62816]: DEBUG 
oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] vif_plug_ovs_privileged.user = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.107230] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.flat_interface = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.107408] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.107579] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.107748] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.107917] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.108093] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.108262] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.108423] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.108599] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.108767] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.isolate_vif = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.108934] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.109112] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.109282] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.109450] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.ovsdb_interface = native {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.109611] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] os_vif_ovs.per_port_bridge = False {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.109784] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] privsep_osbrick.capabilities = [21] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.109943] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] privsep_osbrick.group = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.110115] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] privsep_osbrick.helper_command = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.110282] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.110444] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.110602] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] privsep_osbrick.user = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.110774] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.110934] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] nova_sys_admin.group = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.111101] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] nova_sys_admin.helper_command = None {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.111268] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.111431] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.111588] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] nova_sys_admin.user = None {{(pid=62816) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 550.111717] env[62816]: DEBUG oslo_service.service [None req-3ddad715-7b30-4f51-b377-f14885380c4c None None] ******************************************************************************** {{(pid=62816) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 550.112214] env[62816]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 550.616070] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Getting list of instances from cluster (obj){ [ 550.616070] env[62816]: value = "domain-c8" [ 550.616070] env[62816]: _type = "ClusterComputeResource" [ 550.616070] env[62816]: } {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 550.617269] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605f00fb-a50e-44c5-af7a-d527b736d7f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.626465] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Got total of 0 instances {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 550.626973] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 550.627478] env[62816]: INFO nova.virt.node [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Generated node identity 27f49c85-1bb9-4d17-a914-e2f45a5e84fa [ 550.627756] env[62816]: INFO nova.virt.node [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Wrote node identity 27f49c85-1bb9-4d17-a914-e2f45a5e84fa to /opt/stack/data/n-cpu-1/compute_id [ 551.130203] env[62816]: WARNING nova.compute.manager [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Compute nodes ['27f49c85-1bb9-4d17-a914-e2f45a5e84fa'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 552.135207] env[62816]: INFO nova.compute.manager [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 553.140788] env[62816]: WARNING nova.compute.manager [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 553.141172] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.141316] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.141468] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.141627] env[62816]: DEBUG nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 553.142591] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4471107d-8720-485a-85f7-895f35522ce0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.151088] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-447354d9-7908-40f7-b74f-0a9195ad3ac3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.164467] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913cb5ab-e60b-46d4-bc7f-56ea2d77f77b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.170914] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258b0f30-0352-408a-bbf0-8444fc9a8330 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.199986] env[62816]: DEBUG nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181391MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 553.200153] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.200342] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.702818] env[62816]: WARNING 
nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] No compute node record for cpu-1:27f49c85-1bb9-4d17-a914-e2f45a5e84fa: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 27f49c85-1bb9-4d17-a914-e2f45a5e84fa could not be found. [ 554.206984] env[62816]: INFO nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa [ 555.714963] env[62816]: DEBUG nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 555.715411] env[62816]: DEBUG nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 555.871844] env[62816]: INFO nova.scheduler.client.report [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] [req-ea5f0b60-5d84-402a-9936-13585f503c20] Created resource provider record via placement API for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 555.888367] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab682375-1843-42e8-80b9-c095592546d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.896029] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d7940a-67fb-45e7-82a8-fc0aff36a13b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.924967] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0152244b-c020-488f-935b-3b1c8c59eda6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.931611] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4e9e1e-ca44-4682-b69b-d0a753dc90a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.943928] env[62816]: DEBUG nova.compute.provider_tree [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 556.482770] env[62816]: DEBUG nova.scheduler.client.report [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 556.483034] env[62816]: DEBUG nova.compute.provider_tree [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 0 to 1 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 556.483184] env[62816]: DEBUG nova.compute.provider_tree [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 556.533285] env[62816]: DEBUG nova.compute.provider_tree [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 1 to 2 during operation: update_traits {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 557.038500] env[62816]: DEBUG nova.compute.resource_tracker [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 557.038828] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.838s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.038880] env[62816]: DEBUG nova.service [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Creating RPC server for service compute {{(pid=62816) start /opt/stack/nova/nova/service.py:186}} [ 557.051324] env[62816]: DEBUG nova.service [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] Join ServiceGroup membership for this service compute {{(pid=62816) start /opt/stack/nova/nova/service.py:203}} [ 557.051528] env[62816]: DEBUG nova.servicegroup.drivers.db [None req-8e854053-0e3c-4a42-9c5a-5ec2ae7783b9 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62816) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 602.053277] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.556242] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Getting list of instances from cluster (obj){ [ 602.556242] env[62816]: value = 
"domain-c8" [ 602.556242] env[62816]: _type = "ClusterComputeResource" [ 602.556242] env[62816]: } {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 602.557743] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f869e53-14a8-48f0-8031-1bff5fea4c7f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.566037] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Got total of 0 instances {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 602.566259] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 602.566551] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Getting list of instances from cluster (obj){ [ 602.566551] env[62816]: value = "domain-c8" [ 602.566551] env[62816]: _type = "ClusterComputeResource" [ 602.566551] env[62816]: } {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 602.567383] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cd1d7c-e255-4e93-a091-ff41f4d15629 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.574651] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Got total of 0 instances {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 605.466051] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.466484] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.466484] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 605.466484] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 605.969188] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 605.969429] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.969870] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.970268] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.970592] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.970938] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.971291] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 605.972584] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 605.972584] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 606.474968] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.475388] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 606.475445] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.475576] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 606.476456] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741aab6d-c81f-4486-8258-ecdb97499275 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.484361] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25844f82-8fcb-488f-bef5-5d46ba0b1036 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.498217] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37b73df-35f9-4fc8-8911-b00429a4d305 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.504313] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b7bd59-1922-4a56-a29f-ec435c071258 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.531974] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181391MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 606.532128] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 606.532316] 
env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.550425] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 607.550705] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 607.564972] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af67e2dc-7a7f-4ced-b379-cde80fa3b08c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.572511] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a078c0bb-0603-4c86-9516-0c9fcb27e975 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.601690] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-649bf7ba-d821-47e0-8a59-4864a3aed7a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.609154] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceed8563-7f1f-4be8-8527-8cc52307f47e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.622370] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.125913] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 608.630914] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 608.631291] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.099s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.616820] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.617195] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.122084] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.122264] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 669.122381] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 669.624841] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 669.625279] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.625279] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.625455] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.625515] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.625659] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.625797] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 669.625938] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 669.626088] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 670.129115] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.129408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.129522] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.129678] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 670.130608] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eafc5ae-92a7-40f5-9c33-cd2e6aab3a17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.138822] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c340c64b-be09-4e53-ae9b-fde5e46f1314 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.152528] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a9bc41-0f12-424c-b19a-41db905533c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.158591] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffeb4421-beb0-420e-99ad-685b62d8571f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.186161] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 670.186290] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.186468] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.204348] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 671.204613] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 671.217099] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca2f8f3-13b5-46c9-bc2b-f8104228c5ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.225454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0c67b1-5bec-46d9-9108-d3fd60f05e6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.253814] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c4b99f-b9a5-4df5-8ae1-c1b791b2fb0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.260915] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15c5334-39a1-4ffa-96ea-ad12f98d8289 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.273504] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.776609] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 671.778079] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 671.778326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.780934] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.780934] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 731.780934] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 731.781412] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 732.284127] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 732.284675] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.284965] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.285241] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.285536] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.287051] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.287051] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.287051] env[62816]: DEBUG 
nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 732.287051] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 732.791073] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.791073] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.791073] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.791073] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 732.791073] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32483cd3-ef5c-42c7-8452-fc493d5717a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.799178] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be71071-d245-4785-a863-479a76da5fa0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.813191] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfa9f85-78da-49e0-89c2-b19481209294 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.819405] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bf6b9c-3071-46cf-95bf-d0d83488cac6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.847547] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181378MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 732.847848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.848175] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.867388] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 733.867621] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 733.880048] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7dd9b3-b87d-4335-959e-7b26fe1b7a0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.887503] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71404d21-f4e3-466f-ae28-9d55281a04b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.917419] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3afebc2-f3d1-446f-bdae-cebfdc2bb69b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.924039] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79e8eaf-1abf-42c2-b8b8-5e0386db99fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.936586] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.439171] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.440467] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 
734.440641] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.116223] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.116223] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.620241] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 790.620413] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 790.620537] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 791.123776] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 791.124179] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.124223] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.124382] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.124526] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.124664] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.124806] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.124938] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 791.125102] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 791.627957] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.628222] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.628389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.628541] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 791.629433] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6ef012-ad26-4d19-8cbe-057ac678c9f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.637684] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9f7e10-7173-4350-9504-53f404e04322 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.651506] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad6ea80c-849b-4965-b589-44e6cd513e87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.658114] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee77888-9544-4922-b76b-c90715858d10 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.686943] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181383MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 791.687142] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.687290] 
env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.706498] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 792.706738] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 792.720358] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b78d0f79-0e41-4430-8476-33fc8831aef2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.727787] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed685f8-a3a7-4b9b-a7d6-1049a523575e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.757570] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d938ddb-e1cf-4062-b100-550b23cf9c1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.764972] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a6ba12-a0da-407e-993b-378186dbfdc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.778262] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.281308] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 793.282682] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 793.282842] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.456461] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 845.456859] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 845.960377] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] There are 0 instances to clean {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 845.960645] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 845.960786] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances with incomplete migration {{(pid=62816) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 846.463905] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.966976] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.967370] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.967413] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 848.967539] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 848.967703] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 849.471081] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.471356] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.471535] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.471699] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 849.472626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2501a3-c073-45ff-95f2-61239962fc3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.480884] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0ab2ec3-3b79-48e2-9335-7182570d6d1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.495429] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d169415-865c-486d-a88f-2aceeeb2434d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.502715] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e859a2dd-31b2-4e3d-b4c8-83808049c08a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.528940] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 849.529103] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.529283] 
env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.547476] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 850.547736] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 850.561505] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd08ca62-ac04-4e50-a5b3-542fcc3e6bf5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.568752] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b533c62-a50c-4eba-aabd-3eafa90326ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.597850] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f6b3ae-50a8-4063-977d-876f1eeda3ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.604403] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b5e5a5-bca7-4d43-9be6-05b4e9dd736e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.616755] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.119791] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 851.121066] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 851.121255] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.610994] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.611373] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 851.611373] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 852.114798] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 852.114978] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.115168] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.115320] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 852.115498] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.456707] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.457129] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 907.457241] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 908.456956] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 908.959904] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 908.960184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 908.960354] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.960526] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 908.961440] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c8f6fc-08c8-41a6-8411-edd17d5b1e6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.969676] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39480243-3c73-4d6b-9da7-9895289f46d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.983495] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c550cb92-8435-424f-9bbb-71699ee65b82 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.989806] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83fba79-98a9-400a-a9b4-ae08ef440ea3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.019690] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 909.019839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.020024] 
env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.059417] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 910.059708] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 910.076053] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 910.090037] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 910.090037] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 910.104349] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 910.119499] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 910.130182] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-47944472-eda5-4844-8e4b-82c626cc2def {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.137495] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1dddc7-2cda-4556-9b60-999bac6fb584 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.165985] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-835ed5d5-777c-441e-8186-a323659a9887 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.172839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0c2018-038e-4f9d-b91b-4cafab2c3603 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.186124] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.689506] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 910.690775] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 910.690955] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.671s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.685644] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 911.686041] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.190650] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.190850] env[62816]: DEBUG oslo_service.periodic_task 
[None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.191018] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.457502] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.457690] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 912.457757] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 912.960612] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 912.960936] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.457692] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.458093] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 968.458093] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 968.961194] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.961459] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.961666] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 968.961860] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 968.962756] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abeceb93-1fdc-427a-8c00-101b0e34d141 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.971427] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a894e9-12e9-4aef-b429-1c76759d5411 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.984991] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c2a107-8963-4682-a216-d2a287279f12 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.991051] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b8cee4-aceb-489f-9348-7c823e098f17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.019235] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181386MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 969.019383] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.019552] 
env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.037782] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 970.038027] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 970.050287] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c50eb27-5e0d-40a1-8a58-de30b747c5fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.057777] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfbf0da-8059-404f-8049-2be26e8ed1f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.086211] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec3e7fd-5793-4a2a-aa40-74aebd45a304 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.092699] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1764f436-44d1-473b-bf63-421bdd1fbf0a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.105577] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 970.608836] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 970.610157] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 970.610340] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.609584] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.609981] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.609981] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 971.610228] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.457288] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.458149] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 973.458541] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 973.458541] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 973.961629] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 973.962190] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.457215] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.457634] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1029.457634] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1029.960419] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.960678] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.960851] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.961016] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1029.961913] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a0c4b7-7f04-4f8e-9118-e31e932899f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.969963] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd71a7bd-b2ea-412f-b17e-a98895bfedce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.983377] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc86890e-76b4-4179-845c-bd62582dd64e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.989314] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077970ac-ca2d-4dea-b1af-f4a69b35aa20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.017736] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181379MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1030.017899] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1030.018063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1031.036432] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1031.036668] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1031.049229] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af75c79-ae2a-4d2a-81ec-2f4b679c011a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.056959] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216cab76-fbf2-43ea-a951-bb61b0eb2a22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.085576] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479e9bcd-0091-4b0a-8602-5941ca07b141 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.091994] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ed7d9b-3491-43b5-9a7f-627ab3fdbc91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.104415] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.607400] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1031.608717] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1031.609085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.604289] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1032.604615] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.108802] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.109048] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.109117] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.456653] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.456823] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.457571] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.457938] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1034.457938] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1034.960801] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1090.456712] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1090.961061] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1090.961061] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1090.961061] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.961061] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1090.961829] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bedfbb9-3bce-4be5-97ef-011603e370e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.970414] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930c83a0-7d30-4d34-a23c-50c515487f34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.984258] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9369f0-8957-4ac5-a7c2-b3a8bca5a8cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.990397] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca1ca20-16d8-48d8-8eae-aba2470ee6c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.019614] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181363MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1091.019802] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1091.020010] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.038613] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1092.038872] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1092.051334] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e76fa91-5527-4416-9fe4-b1a9aa069ee8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.058995] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edececc7-fe5a-459a-aa6a-3ebe691e5766 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.087820] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b08ad0-c452-4ae8-91f9-be778ef50a0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.094277] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6812ab9c-b8a1-4fc9-a16b-4cc009e6864a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.106805] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.609770] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1092.611077] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1092.611264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1093.611642] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.612185] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.612185] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.612359] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.612426] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1093.612586] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1094.456920] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.457113] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1094.457226] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1094.959819] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1094.960215] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.960252] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.461015] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.958123] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.463652] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.463906] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.464100] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1152.464170] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1152.967275] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.967646] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1152.967707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1152.967857] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1152.968752] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16ec965-e20b-4d13-8d42-3c43506cc888 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.976887] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3205f95-0db3-48e1-bb08-a351e7ad1dcc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.990267] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49aaca8f-5ebf-4cd9-93db-2ed41d6e0d1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.996141] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddfa2278-e2c9-4727-88af-fab8fd58d5c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.024483] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181366MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1153.024625] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1153.024791] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.042742] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1154.042971] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1154.056758] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf25b647-66ad-41e6-9f6c-4bb73e58de3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.064241] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9886d4-4108-4bb2-a34f-718a1dc0b50b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.092585] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c20f469-96ab-4d2c-863e-23623d2f12c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.099046] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f4c51a-f593-4be0-bba6-4291043bc3ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.111562] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.614544] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1154.615777] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1154.615960] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1155.110699] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.111092] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.111092] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.456636] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.456829] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.457040] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1156.457381] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1156.457381] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1156.959923] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1157.458528] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1157.458528] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1157.961730] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] There are 0 instances to clean {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1158.457367] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.457546] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances with incomplete migration {{(pid=62816) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1202.559519] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.066070] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Getting list of instances from cluster (obj){ [ 1203.066070] env[62816]: value = "domain-c8" [ 1203.066070] env[62816]: _type = "ClusterComputeResource" [ 1203.066070] env[62816]: } {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1203.066070] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659835f4-4a16-4327-8e7a-1398376d6acb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.073417] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Got total of 0 instances {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1211.971356] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.452093] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.455658] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.455815] env[62816]: DEBUG nova.compute.manager [None 
req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1213.456826] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.960893] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1213.961134] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1213.961270] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1213.961405] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1213.962331] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1695a81e-bc4a-40b5-9007-372ac9d7089d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.970709] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc89980-4c7e-441c-9394-ae927f9ad9de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.984051] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c729ddc3-bd7e-4769-8295-b5d875010fbe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.989964] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5321ed3-18a9-4a33-9ece-236cc15732a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.018203] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181381MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1214.018325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.018507] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1215.133258] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1215.133520] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1215.148307] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1215.159280] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1215.159485] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1215.168148] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1215.182257] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1215.192815] env[62816]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21703083-6f83-4e2e-8c2e-3787abe9a335 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.200466] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba96380a-6e74-4588-b2e0-b76f5e8a8d64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.228904] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c813e0e4-af78-46dc-9e89-a379c05c0120 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.235700] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee30baf2-9e94-4b4d-aa3a-3235055526b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.248234] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.751266] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.752595] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1215.752777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.734s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.752735] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.753078] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.753078] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1217.456525] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.457474] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1218.457845] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1218.457845] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1218.960549] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1271.457197] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1272.452635] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.457776] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1273.457776] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1275.456718] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.457086] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.457086] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.960869] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1275.961232] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.961319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1275.961478] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1275.962398] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca62b6b-a2da-4545-a04e-9801d6f23a30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.970290] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8b7d52-ce50-40fb-8e17-a9d2daa84564 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.984059] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349a7c20-dfc1-4f8a-81f9-3b895ba2afa7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.990030] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000bb460-5223-4c16-bbbe-690e68ed2647 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.017699] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181352MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1276.017883] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1276.018017] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.037401] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1277.037664] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1277.051178] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e03cf7-1207-49d3-95f3-f9b141e875a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.058556] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066710a9-727b-40b2-b231-4e23ab193e2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.091519] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d48f676-e396-4bad-bc93-0e1a58f675ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.098599] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5764d66-bc5c-4562-a646-05ec08471697 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.111168] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1277.614263] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1277.615507] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1277.615688] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.598s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1278.611422] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.115768] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.115956] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.458378] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.458785] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1280.458785] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1280.961458] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1332.456552] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1333.453240] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.456999] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1334.457326] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1335.456982] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.960616] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.960970] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.961068] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.961218] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1335.962147] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dcf878-4a57-4907-9957-83ca944aea61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.970572] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4590eded-06f7-4e28-b49f-ac519a199327 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.984218] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f521e0ed-5eeb-482c-9c22-16d99aaea9e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.990184] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f843d9f1-3cff-426c-ad06-6b975486d3c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.018614] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181356MB free_disk=162GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1336.018832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.018965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.043239] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1337.043597] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1337.060935] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b79919-2278-44fa-92c1-9bacdbbf73b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.069747] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf03622-9d5b-40f2-a652-6900cd124d02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.105955] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5656876-eb8b-48ba-bd9e-dbdfe9e0cc94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.115547] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd84013-bcea-4ffe-89ec-d31923607d79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.129104] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1337.632665] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.633953] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1337.634141] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.615s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.633619] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1338.634293] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.457078] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1339.457326] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.459350] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1340.459739] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1340.459739] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1340.964667] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1354.325931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.326563] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.632702] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "99bd7579-7097-41df-a8c0-e12a3863a3dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.633262] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1354.832314] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1354.969372] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "666d5105-ee2e-4691-b13c-bd7feb045959" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.969561] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "666d5105-ee2e-4691-b13c-bd7feb045959" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.136819] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1355.377389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.377389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.379404] env[62816]: INFO nova.compute.claims [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1355.477289] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1355.666365] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.011012] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1356.474263] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70080588-d35b-442f-8d81-5d42982bb37d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.482657] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d40fe9d3-ba81-41f0-a165-75b970f3cc40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.521982] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91296f6f-1c9e-44ba-80d2-aee3a3217c55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.529256] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dede36-867d-4027-89ee-bf35da034cfd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.543492] env[62816]: DEBUG nova.compute.provider_tree [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1357.047589] env[62816]: DEBUG nova.scheduler.client.report [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1357.359140] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.360381] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.555517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.178s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.556198] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1357.559503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.893s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.562244] env[62816]: INFO nova.compute.claims [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1357.864951] env[62816]: DEBUG nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1358.066379] env[62816]: DEBUG nova.compute.utils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1358.068784] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1358.069113] env[62816]: DEBUG nova.network.neutron [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1358.396331] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1358.417434] env[62816]: DEBUG nova.policy [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f062fc536a1c4bbeabcb41197b1bc4fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f981032701b04b14841045ed05cbe9a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1358.576976] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1358.688454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6985d7ec-4641-46c2-8c9c-4392f0cd0891 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.696177] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b819ef5-3819-4bd4-850a-8a542b124a4c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.726774] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81cdf92-f8ba-451d-b2e0-bae08b7f4ecf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.735521] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754203af-26a4-46be-88be-55ca104b1496 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.749630] env[62816]: DEBUG nova.compute.provider_tree [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1358.981129] env[62816]: DEBUG nova.network.neutron [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Successfully created port: c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1359.253114] env[62816]: DEBUG nova.scheduler.client.report [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1359.415110] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.415327] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.595335] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1359.624997] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1359.625367] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1359.625447] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1359.625684] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1359.625775] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1359.625924] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1359.626176] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1359.626338] env[62816]: DEBUG 
nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1359.626506] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1359.626669] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1359.626841] env[62816]: DEBUG nova.virt.hardware [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1359.627740] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb63e9c-24f6-452c-bc51-8fcd270f9caf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.635510] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78165a3c-4a60-4847-bc30-08fde3a3d655 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.656361] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11021073-01d5-48df-b0cc-718267b29031 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.758973] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.199s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.759618] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1359.762520] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.752s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.764033] env[62816]: INFO nova.compute.claims [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1359.918233] env[62816]: DEBUG nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1360.268125] env[62816]: DEBUG nova.compute.utils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1360.274470] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1360.274844] env[62816]: DEBUG nova.network.neutron [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1360.365212] env[62816]: DEBUG nova.policy [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '932a4f8d29fb472fb09983fd4c84288b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f09a23020874a6798ef4d132f6ec845', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1360.453868] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1360.774781] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1360.844445] env[62816]: DEBUG nova.network.neutron [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Successfully created port: 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1360.888223] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8d843a-f507-4d8b-ae9c-7a8ec9fcc204 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.898387] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72eb24be-f557-436f-b7cd-7ff7be9a7e75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.940071] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c72532-9f89-42d6-a13d-a9406efe87e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.948604] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f8b4a5-7ff1-4bf4-89b8-7164123bf99c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.963878] env[62816]: DEBUG nova.compute.provider_tree [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1361.377150] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "7be4c8f8-240c-4a71-93bb-aeb94243d781" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.378107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "7be4c8f8-240c-4a71-93bb-aeb94243d781" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.468045] env[62816]: DEBUG nova.scheduler.client.report [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1361.790290] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1361.817931] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1361.818214] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1361.818323] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1361.818500] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1361.818639] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1361.818778] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1361.819069] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1361.819196] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1361.819359] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1361.819515] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1361.819680] env[62816]: DEBUG nova.virt.hardware [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1361.820877] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631476c0-c2f4-4482-ab73-afb4ee6f8e51 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.829330] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d119d4-a1e8-4a6b-afa4-127a88176752 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.884223] env[62816]: DEBUG nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1361.973057] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.973617] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1361.979131] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.583s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.981428] env[62816]: INFO nova.compute.claims [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1362.412342] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.485438] env[62816]: DEBUG nova.compute.utils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1362.486905] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1362.487090] env[62816]: DEBUG nova.network.neutron [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1362.709177] env[62816]: DEBUG nova.policy [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72bdf2df4b2b494285f821961fbb6721', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71f62ae21e354a46bac67b6be2d642a5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1362.992245] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1363.119876] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc338216-5fa8-4b03-9f7a-a612fd02d988 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.128420] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc0a9e7-c531-417b-bfa3-959b9c472f13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.166123] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b7af4c-c64f-4587-8e7b-e477fe471221 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.177188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51aedb5-b37f-4cab-8b65-106502f9455d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.191489] env[62816]: DEBUG nova.compute.provider_tree [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1363.592868] env[62816]: DEBUG nova.network.neutron [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Successfully created port: 19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1363.695933] env[62816]: DEBUG nova.scheduler.client.report [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1364.006529] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1364.055694] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1364.055694] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1364.055694] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1364.055898] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1364.055898] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1364.056814] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1364.057109] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1364.057716] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1364.057909] 
env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1364.058105] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1364.058288] env[62816]: DEBUG nova.virt.hardware [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1364.059191] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9c4fad-381d-41d6-a9ec-5e5f7c50ec0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.076450] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876e9247-3047-44a8-afa6-6bd8c23415b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.158244] env[62816]: DEBUG nova.network.neutron [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Successfully updated port: 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.201728] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.222s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.202247] env[62816]: DEBUG nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1364.206064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.753s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.209232] env[62816]: INFO nova.compute.claims [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1364.245025] env[62816]: DEBUG nova.network.neutron [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Successfully updated port: c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1364.662259] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.662316] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.662582] env[62816]: DEBUG nova.network.neutron [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.710190] env[62816]: DEBUG nova.compute.utils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1364.710190] env[62816]: DEBUG nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Not allocating networking since 'none' was specified. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1364.747991] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.749998] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.749998] env[62816]: DEBUG nova.network.neutron [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1365.211140] env[62816]: DEBUG nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1365.245290] env[62816]: DEBUG nova.network.neutron [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1365.285320] env[62816]: DEBUG nova.compute.manager [req-0e71b07a-58a7-4fd4-9a37-c85ff20e8c80 req-e092ff6b-fcae-4589-abc8-8d723c250c8c service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-vif-plugged-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1365.285320] env[62816]: DEBUG oslo_concurrency.lockutils [req-0e71b07a-58a7-4fd4-9a37-c85ff20e8c80 req-e092ff6b-fcae-4589-abc8-8d723c250c8c service nova] Acquiring lock "99bd7579-7097-41df-a8c0-e12a3863a3dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1365.285320] env[62816]: DEBUG oslo_concurrency.lockutils [req-0e71b07a-58a7-4fd4-9a37-c85ff20e8c80 req-e092ff6b-fcae-4589-abc8-8d723c250c8c service nova] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1365.285439] env[62816]: DEBUG oslo_concurrency.lockutils [req-0e71b07a-58a7-4fd4-9a37-c85ff20e8c80 req-e092ff6b-fcae-4589-abc8-8d723c250c8c service nova] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1365.285575] env[62816]: DEBUG nova.compute.manager [req-0e71b07a-58a7-4fd4-9a37-c85ff20e8c80 req-e092ff6b-fcae-4589-abc8-8d723c250c8c service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] No waiting events found dispatching network-vif-plugged-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1365.285744] env[62816]: WARNING nova.compute.manager [req-0e71b07a-58a7-4fd4-9a37-c85ff20e8c80 req-e092ff6b-fcae-4589-abc8-8d723c250c8c service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received unexpected event network-vif-plugged-1443be92-279c-4376-8c5d-2dff1bb3f82f for instance with vm_state building and task_state spawning. [ 1365.352432] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798d67c3-ba92-47a0-ba37-4ac7bb0b4c8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.355898] env[62816]: DEBUG nova.network.neutron [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1365.367479] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682d1282-0fdb-4a84-83a0-4fe0230939ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.398470] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a4ad43-e852-4256-9972-32d60393637d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.406984] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d531858a-e28e-455d-88a0-59885d51ea2b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.423516] env[62816]: DEBUG nova.compute.provider_tree [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1365.620648] env[62816]: DEBUG nova.network.neutron [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Successfully updated port: 19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1365.742878] env[62816]: DEBUG nova.network.neutron [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.924305] env[62816]: DEBUG nova.network.neutron [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with 
network_info: [{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.927133] env[62816]: DEBUG nova.scheduler.client.report [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1366.123908] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.124094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquired lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.124293] env[62816]: DEBUG nova.network.neutron [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1366.229271] env[62816]: DEBUG nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1366.247623] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.247623] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Instance network_info: |[{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1366.247744] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:79:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1443be92-279c-4376-8c5d-2dff1bb3f82f', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1366.267906] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.270484] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8bb10e6-8726-43fd-8a1a-f61555ec3755 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.281106] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1366.281409] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1366.281566] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1366.281784] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1366.281923] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1366.282355] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1366.282612] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1366.282801] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1366.283204] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1366.283513] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1366.283812] env[62816]: DEBUG nova.virt.hardware [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1366.284878] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce17e03c-a50d-4113-afa6-a5428abff601 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.290669] env[62816]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1366.290905] env[62816]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62816) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1366.291483] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Folder already exists: OpenStack. Parent ref: group-v4. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1366.291666] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating folder: Project (2f09a23020874a6798ef4d132f6ec845). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.293438] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3dd3fbf6-d7c0-4249-ab94-e6a79a8b5fe7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.311568] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658f202c-03e5-4010-ac38-d1c616fca71e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.317084] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Created folder: Project (2f09a23020874a6798ef4d132f6ec845) in parent group-v370905. 
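The nova.virt.hardware trace above records the inputs (1 vCPU, flavor/image limits of 65536 sockets/cores/threads) and the result ([VirtCPUTopology(cores=1,sockets=1,threads=1)]). A minimal toy sketch, not Nova's actual implementation, reproducing why those inputs admit exactly one topology:

# Illustrative sketch only (not nova.virt.hardware): enumerate socket/core/thread
# splits of a vCPU count under the (hypothetical) per-dimension limits, as in the
# "Got 1 possible topologies" trace above.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -- the single 1:1:1 topology logged above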
[ 1366.317337] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating folder: Instances. Parent ref: group-v370910. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.317610] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6006eb94-a399-4cc3-af34-83947003fd3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.332555] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1366.339708] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Creating folder: Project (47cf283fe8984c279f2f83fb9ae537e8). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.342068] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb570d7a-5e9d-4286-b139-9485af262310 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.344072] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Created folder: Instances in parent group-v370910. [ 1366.344072] env[62816]: DEBUG oslo.service.loopingcall [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.344072] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1366.347018] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85318731-afa0-45b9-b6bc-b0a47eedbb38 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.361725] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Created folder: Project (47cf283fe8984c279f2f83fb9ae537e8) in parent group-v370905. [ 1366.361877] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Creating folder: Instances. Parent ref: group-v370912. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.364756] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9887ed14-875d-4319-9629-a8c59a0ec858 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.370652] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1366.370652] env[62816]: value = "task-1787854" [ 1366.370652] env[62816]: _type = "Task" [ 1366.370652] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.375751] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Created folder: Instances in parent group-v370912. [ 1366.376014] env[62816]: DEBUG oslo.service.loopingcall [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.377053] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1366.377053] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d36e6dc-6d7e-4a10-a256-c44d4f1dd3f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.394710] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787854, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.405040] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1366.405040] env[62816]: value = "task-1787856" [ 1366.405040] env[62816]: _type = "Task" [ 1366.405040] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.417222] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787856, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.425081] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1366.425551] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Instance network_info: |[{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1366.425984] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:3d:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c37f87b0-7404-4bad-89e7-5ebbccb43aad', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1366.434291] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Creating folder: Project (f981032701b04b14841045ed05cbe9a6). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.435520] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.436115] env[62816]: DEBUG nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1366.439069] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8609c0e6-d85a-4c81-a816-76b4237edc7d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.442130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.030s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.446161] env[62816]: INFO nova.compute.claims [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1366.461153] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "927badc2-decf-49af-b2c0-d95b471272c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.461153] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "927badc2-decf-49af-b2c0-d95b471272c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.461153] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Created folder: Project (f981032701b04b14841045ed05cbe9a6) in parent group-v370905. [ 1366.461153] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Creating folder: Instances. Parent ref: group-v370916. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1366.461153] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ac1fe5f-4578-469a-b782-f1230b41ac28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.475593] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Created folder: Instances in parent group-v370916. [ 1366.475851] env[62816]: DEBUG oslo.service.loopingcall [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.476057] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1366.476272] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80cb80c6-d54f-456b-8b72-095abaa6c445 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.503838] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1366.503838] env[62816]: value = "task-1787859" [ 1366.503838] env[62816]: _type = "Task" [ 1366.503838] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.512771] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787859, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.673855] env[62816]: DEBUG nova.network.neutron [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1366.882563] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787854, 'name': CreateVM_Task, 'duration_secs': 0.447752} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.882653] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1366.904016] env[62816]: DEBUG oslo_vmware.service [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d22df33-5514-44e1-974f-8dc2208b5fc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.916828] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.917053] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1366.919701] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1366.919701] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d218a974-fddb-458e-b660-d9bd1dc0da3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.929198] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787856, 'name': CreateVM_Task, 'duration_secs': 0.33127} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.929646] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1366.930839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1366.933871] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1366.933871] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5242bb41-7d13-9080-1051-28b6ee6856c4" [ 1366.933871] env[62816]: _type = "Task" [ 1366.933871] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.944646] env[62816]: DEBUG nova.compute.utils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1366.946203] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5242bb41-7d13-9080-1051-28b6ee6856c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.946597] env[62816]: DEBUG nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Not allocating networking since 'none' was specified. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1366.962329] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1366.969729] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.970055] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.018619] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787859, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.047155] env[62816]: DEBUG nova.network.neutron [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Updating instance_info_cache with network_info: [{"id": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "address": "fa:16:3e:16:82:4e", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.72", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19d501a7-a6", "ovs_interfaceid": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.442783] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.443196] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 
{{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1367.443303] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.443442] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.444088] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1367.444165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.444524] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1367.444626] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-441784ca-4000-43e6-bcfe-894ff293f393 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.446460] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae3f0389-581f-4880-a3c7-687f8589b0c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.449940] env[62816]: DEBUG nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1367.453506] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1367.453506] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527b538f-cc35-9fc0-1df2-7869674ba884" [ 1367.453506] env[62816]: _type = "Task" [ 1367.453506] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.462453] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1367.462626] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1367.466014] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a28e5f-d8ed-463f-b029-35c3f71971d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.468898] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527b538f-cc35-9fc0-1df2-7869674ba884, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.474046] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1367.479653] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec110afc-89d9-41cd-a0fe-d081003f9b4c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.484571] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1367.484571] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d7994e-b7a6-1284-43bd-372e8da12acd" [ 1367.484571] env[62816]: _type = "Task" [ 1367.484571] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.496364] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d7994e-b7a6-1284-43bd-372e8da12acd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.497334] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.514872] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787859, 'name': CreateVM_Task, 'duration_secs': 0.622635} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.514872] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1367.515241] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.551974] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Releasing lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.552230] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Instance network_info: |[{"id": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "address": "fa:16:3e:16:82:4e", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.72", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19d501a7-a6", "ovs_interfaceid": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1367.552631] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:82:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19d501a7-a6e8-4c20-95c7-6546ec0a4bba', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1367.561095] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Creating folder: Project (71f62ae21e354a46bac67b6be2d642a5). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1367.565534] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-175ad4da-d4af-43ec-ac7a-22bec3256756 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.575063] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Created folder: Project (71f62ae21e354a46bac67b6be2d642a5) in parent group-v370905. [ 1367.575269] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Creating folder: Instances. Parent ref: group-v370919. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1367.575498] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79e3dfda-3dcb-47e4-91c0-7c45bc5c0a8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.584299] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Created folder: Instances in parent group-v370919. [ 1367.584524] env[62816]: DEBUG oslo.service.loopingcall [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1367.584708] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1367.584905] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-053f2af0-e11b-4c53-9991-0d3fe9e4fb67 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.603123] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1367.603123] env[62816]: value = "task-1787862" [ 1367.603123] env[62816]: _type = "Task" [ 1367.603123] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.611043] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787862, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.714684] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d499e45d-037a-4907-ab86-adfcb1c66c99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.728439] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01b7658-6cfe-4ffa-b9ba-adc7956340fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.766227] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f366d7e1-52c2-4c62-b024-f6a2a52b8006 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.774732] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c98a713-c0f4-4577-beeb-124507fdd7b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.793273] env[62816]: DEBUG nova.compute.provider_tree [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.971213] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1367.971511] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1367.971838] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1367.972015] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1367.972317] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1367.973095] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-857f0515-077b-4b2a-85a2-83379340c8ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.984094] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1367.984094] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52684390-ecbc-cdb4-0923-c9d7ba9aafe6" [ 1367.984094] env[62816]: _type = "Task" [ 1367.984094] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.008882] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.009276] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1368.009569] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.009983] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Preparing fetch location {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1368.010169] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating directory with path [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1368.010380] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9223a9c7-8c27-4d7c-a976-ba314ce2bce9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.024395] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.026618] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Created directory with path [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1368.026950] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Fetch image to [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1368.027099] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Downloading image file data 844838ed-b150-482e-a0f6-dcce37470b52 to [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk on the data store datastore1 {{(pid=62816) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1368.028317] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d46d3de-0ce6-44c0-97b7-ec423217743e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.041454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d612be-f505-4b42-ac1c-a9c26382a397 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.054010] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad5b3e1-b914-4a5a-896c-da828114dec2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.092202] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293b0d1a-60c9-4691-8347-dd92b75d4937 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.094850] env[62816]: DEBUG nova.compute.manager [req-e1a2626e-feb5-4fd5-94f6-4e9bb6a9cbfa req-634d073d-3371-45ce-8384-f033eb00ccb9 service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Received event network-vif-plugged-19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1368.095171] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1a2626e-feb5-4fd5-94f6-4e9bb6a9cbfa req-634d073d-3371-45ce-8384-f033eb00ccb9 service nova] Acquiring lock "666d5105-ee2e-4691-b13c-bd7feb045959-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.095417] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1a2626e-feb5-4fd5-94f6-4e9bb6a9cbfa 
req-634d073d-3371-45ce-8384-f033eb00ccb9 service nova] Lock "666d5105-ee2e-4691-b13c-bd7feb045959-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.095596] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1a2626e-feb5-4fd5-94f6-4e9bb6a9cbfa req-634d073d-3371-45ce-8384-f033eb00ccb9 service nova] Lock "666d5105-ee2e-4691-b13c-bd7feb045959-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.095802] env[62816]: DEBUG nova.compute.manager [req-e1a2626e-feb5-4fd5-94f6-4e9bb6a9cbfa req-634d073d-3371-45ce-8384-f033eb00ccb9 service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] No waiting events found dispatching network-vif-plugged-19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1368.096147] env[62816]: WARNING nova.compute.manager [req-e1a2626e-feb5-4fd5-94f6-4e9bb6a9cbfa req-634d073d-3371-45ce-8384-f033eb00ccb9 service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Received unexpected event network-vif-plugged-19d501a7-a6e8-4c20-95c7-6546ec0a4bba for instance with vm_state building and task_state spawning. [ 1368.101189] env[62816]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5ea62c1b-713b-4fa8-80bc-d4192b0d7659 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.112836] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787862, 'name': CreateVM_Task, 'duration_secs': 0.38439} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.115980] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1368.117082] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.117261] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.117575] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1368.117824] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da053670-3881-4114-a155-ee9c55c98709 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.121651] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Downloading image file data 844838ed-b150-482e-a0f6-dcce37470b52 to the data store datastore1 {{(pid=62816) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1368.126621] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1368.126621] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f4ca1c-ceec-3b82-92c5-307431ebea95" [ 1368.126621] env[62816]: _type = "Task" [ 1368.126621] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.138778] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f4ca1c-ceec-3b82-92c5-307431ebea95, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.192936] env[62816]: DEBUG oslo_vmware.rw_handles [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62816) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1368.295837] env[62816]: DEBUG nova.scheduler.client.report [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1368.463474] env[62816]: DEBUG nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1368.497148] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1368.497148] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1368.497148] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1368.497148] env[62816]: DEBUG nova.virt.hardware 
[None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1368.497641] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1368.497641] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1368.497641] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1368.497641] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1368.497641] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1368.497787] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1368.497787] env[62816]: DEBUG nova.virt.hardware [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1368.498076] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddac7a84-93cf-4c57-a294-38ac7f3f6a2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.510795] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044897df-4f4b-4287-9d64-4ea0b00659af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.526836] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1368.532821] 
env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Creating folder: Project (c5ddfddb5446465aba7c4d4d299b3086). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1368.536852] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-36575e70-6f69-494d-8b4e-4c0a8dd29768 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.545128] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Created folder: Project (c5ddfddb5446465aba7c4d4d299b3086) in parent group-v370905. [ 1368.545387] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Creating folder: Instances. Parent ref: group-v370922. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1368.545844] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0c641958-4fe7-47c6-b1b1-f17a71301aa8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.555724] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Created folder: Instances in parent group-v370922. [ 1368.556112] env[62816]: DEBUG oslo.service.loopingcall [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1368.558973] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1368.559180] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85b58213-c9cd-4192-93d4-ca157459d15c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.577219] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1368.577219] env[62816]: value = "task-1787866" [ 1368.577219] env[62816]: _type = "Task" [ 1368.577219] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.588730] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787866, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.637784] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1368.637784] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1368.638058] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.791853] env[62816]: DEBUG nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Received event network-vif-plugged-c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1368.792132] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Acquiring lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.792829] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.793405] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.793627] env[62816]: DEBUG nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] No waiting events found dispatching network-vif-plugged-c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1368.793846] env[62816]: WARNING nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Received unexpected event 
network-vif-plugged-c37f87b0-7404-4bad-89e7-5ebbccb43aad for instance with vm_state building and task_state spawning. [ 1368.795260] env[62816]: DEBUG nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1368.795544] env[62816]: DEBUG nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing instance network info cache due to event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1368.795849] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Acquiring lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1368.796068] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Acquired lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1368.796226] env[62816]: DEBUG nova.network.neutron [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1368.802961] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.803256] env[62816]: DEBUG nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1368.806642] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.309s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.809174] env[62816]: INFO nova.compute.claims [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.964983] env[62816]: DEBUG oslo_vmware.rw_handles [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Completed reading data from the image iterator. {{(pid=62816) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1368.965305] env[62816]: DEBUG oslo_vmware.rw_handles [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1369.019522] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Downloaded image file data 844838ed-b150-482e-a0f6-dcce37470b52 to vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk on the data store datastore1 {{(pid=62816) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1369.022894] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Caching image {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1369.022894] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Copying Virtual Disk [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk to [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1369.023082] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd9081af-49d8-4708-a8dd-459b3fca7343 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.030780] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1369.030780] env[62816]: value = "task-1787867" [ 1369.030780] env[62816]: _type = "Task" [ 1369.030780] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.048406] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.088803] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787866, 'name': CreateVM_Task, 'duration_secs': 0.304136} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.089213] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1369.089885] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.090201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.091239] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1369.091723] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b5b379b-f0b8-4833-bd94-287aef11fb9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.102957] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1369.102957] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d1158d-3076-ee90-a300-49091a744760" [ 1369.102957] env[62816]: _type = "Task" [ 1369.102957] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.115282] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d1158d-3076-ee90-a300-49091a744760, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.315565] env[62816]: DEBUG nova.compute.utils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1369.320991] env[62816]: DEBUG nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Not allocating networking since 'none' was specified. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1369.541073] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787867, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.612931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1369.613226] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1369.613569] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.682447] env[62816]: DEBUG nova.network.neutron [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updated VIF entry in instance network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1369.682765] env[62816]: DEBUG nova.network.neutron [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1369.822315] env[62816]: DEBUG nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1370.007038] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a669c0ae-528a-49d0-88f3-d34315e8106a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.016351] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cbf4063-ce9c-4aff-bee6-7f8d517d1c8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.058503] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a170ca78-97ba-4bc9-897d-943e5056d67e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.068680] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.811458} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.069765] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94a742f-95ad-4252-9f34-04704d11b11e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.074008] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Copied Virtual Disk [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk to [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1370.074224] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleting the datastore file [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52/tmp-sparse.vmdk {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1370.074467] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1239b7b-4c85-437f-b300-3bc70bc02a7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.088148] env[62816]: DEBUG nova.compute.provider_tree [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.090866] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1370.090866] env[62816]: value = "task-1787869" [ 1370.090866] env[62816]: _type = "Task" [ 1370.090866] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.099552] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787869, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.187719] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Releasing lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.188013] env[62816]: DEBUG nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Received event network-changed-c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1370.188191] env[62816]: DEBUG nova.compute.manager [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Refreshing instance network info cache due to event network-changed-c37f87b0-7404-4bad-89e7-5ebbccb43aad. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1370.188506] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Acquiring lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1370.188659] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Acquired lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1370.188702] env[62816]: DEBUG nova.network.neutron [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Refreshing network info cache for port c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1370.593064] env[62816]: DEBUG nova.scheduler.client.report [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1370.615236] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025327} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.616179] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1370.616179] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Moving file from [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46/844838ed-b150-482e-a0f6-dcce37470b52 to [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52. {{(pid=62816) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1370.616392] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-f4621299-f4e7-4b12-883b-9bad6df81da5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.624051] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1370.624051] env[62816]: value = "task-1787870" [ 1370.624051] env[62816]: _type = "Task" [ 1370.624051] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.635384] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787870, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.840586] env[62816]: DEBUG nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1370.882492] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1370.882715] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1370.882866] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1370.883070] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1370.883216] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1370.883356] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1370.883559] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1370.883708] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1370.883866] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b 
tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1370.884044] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1370.884220] env[62816]: DEBUG nova.virt.hardware [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1370.885096] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d51665-ac07-40ce-915f-aadd91aa546e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.893165] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc40d2a2-8f8e-440a-a4fb-3bf939d78835 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.908105] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1370.918878] env[62816]: DEBUG oslo.service.loopingcall [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.918878] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1370.918878] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-128602ba-dd3e-4d7b-a315-fa08dbb536ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.933867] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1370.933867] env[62816]: value = "task-1787871" [ 1370.933867] env[62816]: _type = "Task" [ 1370.933867] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.944836] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787871, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.107233] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1371.107836] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1371.111790] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.089s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.113608] env[62816]: INFO nova.compute.claims [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.141113] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787870, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024965} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.141384] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] File moved {{(pid=62816) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1371.141574] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Cleaning up location [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1371.142917] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleting the datastore file [datastore1] vmware_temp/4fb54fb7-44fb-4dae-9577-b021a2ab5d46 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1371.143231] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d4cedb8-dbc9-4160-8f3b-ce2055d6cb4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.152762] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1371.152762] env[62816]: value = "task-1787872" [ 1371.152762] env[62816]: _type = "Task" [ 1371.152762] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.166621] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787872, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.305811] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.306210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1371.450404] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787871, 'name': CreateVM_Task, 'duration_secs': 0.283084} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.450609] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1371.451015] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.451200] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.451499] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1371.452095] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-994280fa-f929-4c61-9219-ac6f0a73d340 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.456848] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1371.456848] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525e1045-6490-2891-907f-70b074d90a14" [ 1371.456848] env[62816]: _type = "Task" [ 1371.456848] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.469605] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525e1045-6490-2891-907f-70b074d90a14, 'name': SearchDatastore_Task, 'duration_secs': 0.00828} completed successfully. 
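The Acquiring/Acquired lock lines on the devstack-image-cache_base path show per-image serialization around the cached VMDK, so concurrent spawns of the same image do not race while the cache entry is checked or populated. A pure-Python sketch of that fetch-if-missing pattern, with exists() and download() as hypothetical callables:

```python
# Simplified analogue of the per-image locking visible above: a lock named
# after the cache path is taken before the cached image is used or fetched.
# Pure-stdlib sketch; Nova uses oslo_concurrency.lockutils semaphores here.
import threading
from collections import defaultdict

_image_locks = defaultdict(threading.Lock)

def fetch_image_if_missing(cache_path, exists, download):
    """exists() and download() are hypothetical callables for illustration."""
    with _image_locks[cache_path]:
        if not exists(cache_path):
            download(cache_path)
        return cache_path

# usage
path = "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52"
fetch_image_if_missing(path, exists=lambda p: True, download=lambda p: None)
```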
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.470064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.470305] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1371.471895] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.612806] env[62816]: DEBUG nova.compute.manager [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Received event network-changed-19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.613151] env[62816]: DEBUG nova.compute.manager [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Refreshing instance network info cache due to event network-changed-19d501a7-a6e8-4c20-95c7-6546ec0a4bba. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1371.613576] env[62816]: DEBUG oslo_concurrency.lockutils [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] Acquiring lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.613576] env[62816]: DEBUG oslo_concurrency.lockutils [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] Acquired lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.613576] env[62816]: DEBUG nova.network.neutron [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Refreshing network info cache for port 19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.622224] env[62816]: DEBUG nova.compute.utils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1371.626913] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1371.627024] env[62816]: DEBUG nova.network.neutron [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1371.631225] env[62816]: DEBUG nova.network.neutron [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updated VIF entry in instance network info cache for port c37f87b0-7404-4bad-89e7-5ebbccb43aad. 
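The "Using /dev/sd instead of None" entry is the default block-device prefix being filled in before a free device name is chosen for the instance. A toy sketch of picking the next free name under that prefix (not Nova's actual get_next_device_name helper):

```python
# Toy sketch only: choose the next unused device name under the /dev/sd
# prefix that the log line above falls back to. Not Nova's real helper.
import string

def next_device_name(used, prefix="/dev/sd"):
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names")

print(next_device_name([]))            # /dev/sda
print(next_device_name(["/dev/sda"]))  # /dev/sdb
```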
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1371.631541] env[62816]: DEBUG nova.network.neutron [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1371.671762] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787872, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025622} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.672040] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1371.672774] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44d184e6-9f93-4faa-8fff-0add1c18cc00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.678818] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1371.678818] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f293a7-dc37-e1ab-15f8-41759f7ea38b" [ 1371.678818] env[62816]: _type = "Task" [ 1371.678818] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.690371] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f293a7-dc37-e1ab-15f8-41759f7ea38b, 'name': SearchDatastore_Task} progress is 0%. 
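The instance_info_cache entry above is a list of VIF dictionaries. A short sketch of walking that structure to pull out each port's MAC and fixed IPs, with field names taken from the cache entry shown and values abbreviated:

```python
# Walk a network_info list like the one cached above and extract, per VIF,
# the Neutron port id, MAC address, and fixed IPs. Structure abbreviated
# from the log entry; only the fields used here are kept.
network_info = [{
    "id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad",
    "address": "fa:16:3e:d9:3d:cd",
    "devname": "tapc37f87b0-74",
    "network": {
        "label": "shared",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips)  # port id, MAC, ['192.168.233.221']
```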
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.809306] env[62816]: DEBUG nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1371.848481] env[62816]: DEBUG nova.policy [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9fdfaf9360f4dbb959bf3e8bcbee731', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8179e67e019493a894cd7c67825743c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1371.904232] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "66745316-2735-4c49-b1a2-f9e547211761" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1371.904622] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "66745316-2735-4c49-b1a2-f9e547211761" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.130342] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1372.135852] env[62816]: DEBUG oslo_concurrency.lockutils [req-77721b82-5250-45d2-b2d2-cf7b87ffe258 req-51889466-8cec-4b37-a57b-67c04b089dd4 service nova] Releasing lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.194684] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f293a7-dc37-e1ab-15f8-41759f7ea38b, 'name': SearchDatastore_Task, 'duration_secs': 0.017551} completed successfully. 
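The "Policy check for network:attach_external_network failed" line records an oslo.policy decision against the request's credentials. A deliberately simplified role check that reproduces the outcome, assuming (not verified from this log) that the rule requires the admin role:

```python
# Simplified illustration of the failed policy check above. The real
# evaluation goes through oslo.policy rules; requiring the "admin" role
# for network:attach_external_network is an assumption for this sketch.
def check_policy(rule, required_roles, credentials):
    return any(role in credentials.get("roles", []) for role in required_roles)

creds = {"user_id": "c9fdfaf9360f4dbb959bf3e8bcbee731",
         "project_id": "f8179e67e019493a894cd7c67825743c",
         "roles": ["member", "reader"]}

allowed = check_policy("network:attach_external_network", ["admin"], creds)
print(allowed)  # False: a member/reader token cannot attach external networks
```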
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.197677] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.197914] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 99bd7579-7097-41df-a8c0-e12a3863a3dc/99bd7579-7097-41df-a8c0-e12a3863a3dc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1372.198403] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.198596] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1372.199088] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abf0557f-074b-4655-85e1-69155b3fcd9d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.201188] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d9393ce-1265-4041-9d07-aa4ccebba723 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.208050] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1372.208050] env[62816]: value = "task-1787874" [ 1372.208050] env[62816]: _type = "Task" [ 1372.208050] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.217796] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.217796] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Folder [datastore1] devstack-image-cache_base created. 
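The CopyVirtualDisk step above moves the cached image VMDK into the instance's own directory on the datastore. A small sketch of how those "[datastore] dir/file" strings are composed; plain string handling for illustration, not the driver's datastore-path helpers:

```python
# Compose datastore paths of the form "[datastore1] <dir>/<file>" as seen
# in the copy above. Illustration only; the vmwareapi driver has its own
# datastore-path utilities.
def ds_path(datastore, *parts):
    return f"[{datastore}] " + "/".join(parts)

image_id = "844838ed-b150-482e-a0f6-dcce37470b52"
instance_uuid = "99bd7579-7097-41df-a8c0-e12a3863a3dc"

src = ds_path("datastore1", "devstack-image-cache_base", image_id, f"{image_id}.vmdk")
dst = ds_path("datastore1", instance_uuid, f"{instance_uuid}.vmdk")
print(src)  # cached image VMDK
print(dst)  # per-instance root disk VMDK
```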
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1372.217796] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ccdf73d-be26-47a4-a836-07a7b8aa3ef9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.225950] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.227693] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1372.227693] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521e6fd3-8fe7-69bc-b656-99f30b527325" [ 1372.227693] env[62816]: _type = "Task" [ 1372.227693] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.242146] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521e6fd3-8fe7-69bc-b656-99f30b527325, 'name': SearchDatastore_Task, 'duration_secs': 0.009547} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.245918] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f99b8aae-cc35-48bc-a9da-aa954975b581 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.252170] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1372.252170] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e63148-c436-1969-ab8b-692ae4cba6c8" [ 1372.252170] env[62816]: _type = "Task" [ 1372.252170] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.260410] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e63148-c436-1969-ab8b-692ae4cba6c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.339801] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.359199] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e76cd8-0980-4f1c-82e8-bff317ca1bbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.369417] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd91020-fe42-4d4a-92c3-a7bf78ef5eb7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.405336] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf5b8ca-84be-43dd-be6e-927edc681c4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.409105] env[62816]: DEBUG nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1372.417783] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5110bbcd-a0f5-4f41-95bc-282c3b4641d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.432431] env[62816]: DEBUG nova.compute.provider_tree [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.476320] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "f6ddaab3-d420-4ee4-bf75-486228826635" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.476320] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "f6ddaab3-d420-4ee4-bf75-486228826635" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1372.728264] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: 
{'id': task-1787874, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.763879] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e63148-c436-1969-ab8b-692ae4cba6c8, 'name': SearchDatastore_Task, 'duration_secs': 0.008494} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.764197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.764466] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f06102d6-be5c-40d1-ae1d-8ae8190fd0d7/f06102d6-be5c-40d1-ae1d-8ae8190fd0d7.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1372.764800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1372.765012] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1372.765522] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37137d87-0e35-4095-a6f8-2a7e78eeeacc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.769109] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a33cded2-bed6-4e50-a699-4329645925fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.776655] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1372.776655] env[62816]: value = "task-1787875" [ 1372.776655] env[62816]: _type = "Task" [ 1372.776655] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.784511] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1372.784864] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1372.786409] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1ca0a26-be04-4c8a-b1f6-1876845b0ae1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.793626] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787875, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.796976] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1372.796976] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525aa4b6-05ff-3e0a-5f11-4bcd560daed7" [ 1372.796976] env[62816]: _type = "Task" [ 1372.796976] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.807551] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525aa4b6-05ff-3e0a-5f11-4bcd560daed7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.935426] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1372.936468] env[62816]: DEBUG nova.scheduler.client.report [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1373.024852] env[62816]: DEBUG nova.network.neutron [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Updated VIF entry in instance network info cache for port 19d501a7-a6e8-4c20-95c7-6546ec0a4bba. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1373.025142] env[62816]: DEBUG nova.network.neutron [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Updating instance_info_cache with network_info: [{"id": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "address": "fa:16:3e:16:82:4e", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.72", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19d501a7-a6", "ovs_interfaceid": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1373.050397] env[62816]: DEBUG nova.network.neutron [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Successfully created port: 0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 
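The inventory dictionary reported to placement above fixes how much of each resource this node can hand out: usable capacity is (total - reserved) * allocation_ratio, and no single allocation may exceed max_unit. Worked out with the numbers from the log:

```python
# Capacity implied by the inventory reported above:
# usable = (total - reserved) * allocation_ratio, capped per allocation by max_unit.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 162},
}

for rc, inv in inventory.items():
    usable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: usable={usable:.0f}, max per allocation={inv['max_unit']}")
# VCPU: usable=192, MEMORY_MB: usable=196078, DISK_GB: usable=400
```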
1373.147288] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1373.186887] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1373.187136] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1373.187296] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.187476] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1373.188311] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.189391] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1373.189391] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1373.189391] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1373.189391] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1373.189391] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1373.189874] env[62816]: DEBUG nova.virt.hardware [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1373.190819] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c578e31-8e70-4d14-a85a-60965efc394d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.205387] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f25939-866d-43a3-bc78-a975108f3406 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.231051] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787874, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.010894} completed successfully. 
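The hardware.py lines above enumerate every CPU topology whose sockets * cores * threads equals the flavor's vCPU count and then sort them by preference; with 1 vCPU and effectively unlimited maximums, (1, 1, 1) is the only candidate, matching the log. A simplified sketch of that enumeration (without the preference sorting in nova.virt.hardware):

```python
# Enumerate (sockets, cores, threads) combinations whose product equals the
# vCPU count, within the given maximums. Simplified version of the step
# logged as "Build topologies ..." / "Got N possible topologies".
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # [(1, 1, 1)] -> the single topology in the log
print(possible_topologies(4))  # (1, 4, 1), (2, 2, 1), (4, 1, 1), ...
```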
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.231051] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 99bd7579-7097-41df-a8c0-e12a3863a3dc/99bd7579-7097-41df-a8c0-e12a3863a3dc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1373.231051] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1373.233187] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c87c61e-4baa-4437-b8ed-c9df692971b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.237765] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1373.237765] env[62816]: value = "task-1787876" [ 1373.237765] env[62816]: _type = "Task" [ 1373.237765] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.248892] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787876, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.288661] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787875, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.309044] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525aa4b6-05ff-3e0a-5f11-4bcd560daed7, 'name': SearchDatastore_Task, 'duration_secs': 0.129337} completed successfully. 
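The "Extending root virtual disk to 1048576" step appears to be the flavor's root_gb converted to kilobytes for the ExtendVirtualDisk call: the m1.nano flavor above has root_gb=1, and 1 GiB is 1024 * 1024 KB.

```python
# 1048576 in the ExtendVirtualDisk step above is root_gb expressed in KB
# (assuming the KB unit implied by the value; root_gb=1 for m1.nano).
root_gb = 1
size_kb = root_gb * 1024 * 1024
print(size_kb)  # 1048576
```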
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.310560] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3da4f8be-7a71-467c-a1f1-5ef4723e6f70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.316267] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1373.316267] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b23524-5ff9-863e-718b-17474cddc378" [ 1373.316267] env[62816]: _type = "Task" [ 1373.316267] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.326175] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b23524-5ff9-863e-718b-17474cddc378, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.447403] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.450336] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1373.459184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.120s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.461845] env[62816]: INFO nova.compute.claims [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1373.528924] env[62816]: DEBUG oslo_concurrency.lockutils [req-efa75e22-0bbd-44e6-b91e-6b912d8c4204 req-7324c1b6-8624-4cd9-926b-4b396c7389ef service nova] Releasing lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.753800] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787876, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.126143} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.754263] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1373.755943] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154d1660-1321-42af-a93d-81855b108c43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.784391] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 99bd7579-7097-41df-a8c0-e12a3863a3dc/99bd7579-7097-41df-a8c0-e12a3863a3dc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1373.784773] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07708bef-0f8b-41ea-9af0-f1ad825a1c8b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.811829] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787875, 'name': CopyVirtualDisk_Task} progress is 25%. 
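The ReconfigVM_Task call above attaches the copied VMDK to the instance by sending a config spec whose deviceChange adds a virtual disk with the given backing. A plain-dict sketch of what that spec conceptually carries; this is not the vSphere or pyVmomi API, only an illustration:

```python
# Plain-dict illustration of the attach-disk reconfigure above: a config
# spec whose deviceChange adds a VirtualDisk backed by the copied .vmdk.
# Not the real vSphere/pyVmomi object model.
def build_attach_disk_spec(vmdk_path, adapter_type="lsiLogic", disk_type="sparse"):
    return {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "type": "VirtualDisk",
                "backing": {"fileName": vmdk_path,
                            "diskMode": "persistent",
                            "diskType": disk_type},
                "controller": adapter_type,
            },
        }]
    }

spec = build_attach_disk_spec(
    "[datastore1] 99bd7579-7097-41df-a8c0-e12a3863a3dc/"
    "99bd7579-7097-41df-a8c0-e12a3863a3dc.vmdk")
print(spec["deviceChange"][0]["operation"])  # add
```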
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.813529] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1373.813529] env[62816]: value = "task-1787877" [ 1373.813529] env[62816]: _type = "Task" [ 1373.813529] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.827598] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b23524-5ff9-863e-718b-17474cddc378, 'name': SearchDatastore_Task, 'duration_secs': 0.012495} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.831958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1373.832156] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd/3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1373.832298] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787877, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.832541] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1373.832715] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1373.833171] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-caa9cf46-952d-43de-bccf-d76d9f55e09b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.835948] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21c62bf7-f2e8-4921-8b43-f0b63c3f75e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.843087] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1373.843087] env[62816]: value = "task-1787878" [ 1373.843087] env[62816]: _type = "Task" [ 1373.843087] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.853810] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.857830] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1373.859561] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1373.860434] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a881a5b-21b9-4a89-91cc-2acab564c458 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.870720] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1373.870720] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52378078-f4d8-062c-4876-13ada899fb57" [ 1373.870720] env[62816]: _type = "Task" [ 1373.870720] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.879516] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52378078-f4d8-062c-4876-13ada899fb57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.962805] env[62816]: DEBUG nova.compute.utils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1373.965162] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1373.965162] env[62816]: DEBUG nova.network.neutron [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1374.143429] env[62816]: DEBUG nova.policy [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f74c3c2e1f1746d6933c6eee399830f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55ac373cda544fdda9b58434d070d395', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1374.246733] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "de33d02f-7e34-4619-a2ed-cda6c54aa030" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1374.247109] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.297340] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787875, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.517688} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.297782] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f06102d6-be5c-40d1-ae1d-8ae8190fd0d7/f06102d6-be5c-40d1-ae1d-8ae8190fd0d7.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1374.298017] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1374.298284] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-879a160a-597b-4f5c-852b-df82434b5743 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.304852] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1374.304852] env[62816]: value = "task-1787879" [ 1374.304852] env[62816]: _type = "Task" [ 1374.304852] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.316212] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787879, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.325510] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.354557] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.384498] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52378078-f4d8-062c-4876-13ada899fb57, 'name': SearchDatastore_Task, 'duration_secs': 0.176624} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.384498] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49372781-2e5c-48af-abd2-e858ba934ed8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.389924] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1374.389924] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5263e80d-4fdd-5a84-b67d-11c3e8a7d4df" [ 1374.389924] env[62816]: _type = "Task" [ 1374.389924] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.398942] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5263e80d-4fdd-5a84-b67d-11c3e8a7d4df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.471780] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1374.699811] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2167426-16b5-4628-953f-648e30b61d1d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.707957] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1dc384-3eb6-4cc8-8862-d838478d57fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.740883] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54db314-669a-410a-b860-cf85fe0d57b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.749038] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30afc88e-e170-4dfb-8f87-a6e02a3f839b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.764019] env[62816]: DEBUG nova.compute.provider_tree [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1374.821616] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.218002} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.828668] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1374.828668] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d81aa4-6282-4cd4-baa9-1f919a03a119 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.843457] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.864424] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] f06102d6-be5c-40d1-ae1d-8ae8190fd0d7/f06102d6-be5c-40d1-ae1d-8ae8190fd0d7.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.871916] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9471c15-b543-4fd9-81f8-5e1ee6fd93ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.910022] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787878, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.910998] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1374.910998] env[62816]: value = "task-1787880" [ 1374.910998] env[62816]: _type = "Task" [ 1374.910998] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.923642] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.928521] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5263e80d-4fdd-5a84-b67d-11c3e8a7d4df, 'name': SearchDatastore_Task, 'duration_secs': 0.015086} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.928786] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1374.929127] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 666d5105-ee2e-4691-b13c-bd7feb045959/666d5105-ee2e-4691-b13c-bd7feb045959.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1374.929487] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.929804] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1374.930119] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5eb86574-bb13-411e-a663-fa83ba2ed5d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.933050] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5292bcb-d786-4df6-b799-bf81670a06d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.941958] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1374.941958] env[62816]: value = "task-1787881" [ 1374.941958] env[62816]: _type = "Task" [ 1374.941958] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.951378] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1374.951378] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1374.955464] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab7b8adc-04a4-49a3-92a6-d9a06c0ecced {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.958716] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787881, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.962381] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1374.962381] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b89aa7-a401-afa7-1716-9ad1ec5cff7e" [ 1374.962381] env[62816]: _type = "Task" [ 1374.962381] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.974914] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b89aa7-a401-afa7-1716-9ad1ec5cff7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.271145] env[62816]: DEBUG nova.scheduler.client.report [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1375.340056] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787877, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.357966] env[62816]: DEBUG nova.network.neutron [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Successfully created port: 625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.372301] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787878, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.431274] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787880, 'name': ReconfigVM_Task, 'duration_secs': 0.372612} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.431907] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Reconfigured VM instance instance-00000004 to attach disk [datastore1] f06102d6-be5c-40d1-ae1d-8ae8190fd0d7/f06102d6-be5c-40d1-ae1d-8ae8190fd0d7.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1375.432982] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5da220a2-db40-442c-ad58-70bab842d76d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.445977] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1375.445977] env[62816]: value = "task-1787882" [ 1375.445977] env[62816]: _type = "Task" [ 1375.445977] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.467396] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787881, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.479017] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787882, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.487782] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1375.490711] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b89aa7-a401-afa7-1716-9ad1ec5cff7e, 'name': SearchDatastore_Task, 'duration_secs': 0.111231} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.496949] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc695979-00fe-4d5c-93f0-57a121e478dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.500532] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "455052cc-292a-414c-8c83-bc512c49a197" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.501516] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.506720] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1375.506720] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523d4c9b-6031-870a-a8a9-a5e00507ce5c" [ 1375.506720] env[62816]: _type = "Task" [ 1375.506720] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.523823] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523d4c9b-6031-870a-a8a9-a5e00507ce5c, 'name': SearchDatastore_Task, 'duration_secs': 0.008834} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.523823] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.524083] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1375.524364] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.524499] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1375.527707] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-84af9d10-e95d-4f22-8234-af110df5e45c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.529319] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1375.530079] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1375.530079] env[62816]: DEBUG 
nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1375.530079] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1375.530079] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1375.530079] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1375.530234] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1375.530372] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1375.530535] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1375.531185] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1375.531185] env[62816]: DEBUG nova.virt.hardware [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1375.531285] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dba1ccb7-e879-4036-b024-df9bbb78df46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.535410] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31120970-7a07-41e3-9e4d-ac8d45b5c1df {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.545317] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a439fa-8928-468c-8bc9-35c9ee5b508b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.549797] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1375.549797] env[62816]: value = "task-1787883" [ 1375.549797] env[62816]: _type = "Task" [ 1375.549797] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.551568] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1375.551739] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1375.555931] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e846f05-bf79-4de0-876d-5b80e6d11427 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.571335] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1375.571335] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52319d0f-c086-e9fc-9656-aeb5286032dc" [ 1375.571335] env[62816]: _type = "Task" [ 1375.571335] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.574885] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787883, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.584203] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52319d0f-c086-e9fc-9656-aeb5286032dc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.778144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.319s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1375.778696] env[62816]: DEBUG nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1375.784724] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.849s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.793649] env[62816]: INFO nova.compute.claims [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1375.835178] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787877, 'name': ReconfigVM_Task, 'duration_secs': 1.69017} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.835178] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 99bd7579-7097-41df-a8c0-e12a3863a3dc/99bd7579-7097-41df-a8c0-e12a3863a3dc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1375.835178] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-156964ee-accb-47be-a9cc-623d520cb344 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.843806] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1375.843806] env[62816]: value = "task-1787884" [ 1375.843806] env[62816]: _type = "Task" [ 1375.843806] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.862343] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787884, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.874043] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787878, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.532061} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.874043] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd/3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1375.874043] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1375.874429] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-338618a6-9679-4435-87b5-9c383576f392 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.880534] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1375.880534] env[62816]: value = "task-1787885" [ 1375.880534] env[62816]: _type = "Task" [ 1375.880534] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.891643] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787885, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.957651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "fb84cb48-d1a1-4eec-adb8-8edc585263df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1375.957995] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1375.966846] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787881, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.972636] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787882, 'name': Rename_Task, 'duration_secs': 0.137934} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.972973] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1375.973327] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07387fb4-9912-4189-99a1-5ae55ef38022 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.982531] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1375.982531] env[62816]: value = "task-1787886" [ 1375.982531] env[62816]: _type = "Task" [ 1375.982531] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.990382] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.062451] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787883, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.086192] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52319d0f-c086-e9fc-9656-aeb5286032dc, 'name': SearchDatastore_Task, 'duration_secs': 0.106913} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.087452] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c135fde-33e6-43cd-934e-406edf85c904 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.095391] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1376.095391] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5220574b-d358-4b78-6cc6-e6e6140efb6b" [ 1376.095391] env[62816]: _type = "Task" [ 1376.095391] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.106964] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5220574b-d358-4b78-6cc6-e6e6140efb6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.132888] env[62816]: DEBUG nova.network.neutron [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Successfully updated port: 0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1376.249792] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "2bc7f973-007d-44bd-aae8-d3b62506efba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1376.250441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "2bc7f973-007d-44bd-aae8-d3b62506efba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1376.297735] env[62816]: DEBUG nova.compute.utils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1376.306330] env[62816]: DEBUG nova.compute.manager [None 
req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1376.306330] env[62816]: DEBUG nova.network.neutron [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1376.354818] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787884, 'name': Rename_Task, 'duration_secs': 0.289726} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.355084] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1376.355262] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7bbfe53f-8709-4d9d-bfa7-282f96fbb340 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.362013] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1376.362013] env[62816]: value = "task-1787887" [ 1376.362013] env[62816]: _type = "Task" [ 1376.362013] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.370655] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.393860] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787885, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077955} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.394228] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1376.395207] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366ad440-9d27-4c40-9101-7de20e4f0ddb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.419218] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd/3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1376.419890] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9d2e745-ccc3-4b68-a072-f20c1608f2d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.436773] env[62816]: DEBUG nova.policy [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9b48919c724430496c5d846d7300f52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c6b942889914783a95c2abb080137a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1376.444952] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1376.444952] env[62816]: value = "task-1787888" [ 1376.444952] env[62816]: _type = "Task" [ 1376.444952] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.455998] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787888, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.459462] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787881, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.095966} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.459790] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 666d5105-ee2e-4691-b13c-bd7feb045959/666d5105-ee2e-4691-b13c-bd7feb045959.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1376.459963] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1376.460217] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-413d3de3-7254-4127-8d50-d492d5d5bb7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.467755] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1376.467755] env[62816]: value = "task-1787889" [ 1376.467755] env[62816]: _type = "Task" [ 1376.467755] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.477828] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787889, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.489683] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787886, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.563246] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787883, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.608022] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5220574b-d358-4b78-6cc6-e6e6140efb6b, 'name': SearchDatastore_Task, 'duration_secs': 0.012342} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.608857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.609364] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1376.609674] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d4a4e4b-99a7-4fbc-9b3c-076c4c4b3b38 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.617114] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1376.617114] env[62816]: value = "task-1787890" [ 1376.617114] env[62816]: _type = "Task" [ 1376.617114] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.632037] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.636707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "refresh_cache-927badc2-decf-49af-b2c0-d95b471272c9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.636707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "refresh_cache-927badc2-decf-49af-b2c0-d95b471272c9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.636707] env[62816]: DEBUG nova.network.neutron [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.815725] env[62816]: DEBUG nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1376.884531] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787887, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.961216] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787888, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.986216] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787889, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074689} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.989711] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1376.993637] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28302680-6aa2-4976-b6e0-07739ee8b5c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.005040] env[62816]: DEBUG oslo_vmware.api [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787886, 'name': PowerOnVM_Task, 'duration_secs': 0.520225} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.016808] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1377.017376] env[62816]: INFO nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Took 10.79 seconds to spawn the instance on the hypervisor. [ 1377.017669] env[62816]: DEBUG nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1377.029121] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 666d5105-ee2e-4691-b13c-bd7feb045959/666d5105-ee2e-4691-b13c-bd7feb045959.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.033226] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea5ef51-9adb-4829-a2e6-187db70e090f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.036925] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b728be6-0149-45da-b8d8-be9c20aa5e8b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.067556] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1377.067556] env[62816]: value = "task-1787891" [ 1377.067556] env[62816]: _type = "Task" [ 1377.067556] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.077673] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787883, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.066627} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.078785] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1377.079114] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1377.079396] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-be8a4f15-1294-4053-8f0e-f22bb83ed7f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.086216] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787891, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.091289] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1377.091289] env[62816]: value = "task-1787892" [ 1377.091289] env[62816]: _type = "Task" [ 1377.091289] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.103553] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787892, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.131740] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787890, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.202485] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5282a3b-e4c4-4e41-a77a-28b6e7f754a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.210378] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf92f96-3b77-4128-8988-e6c7bbf951db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.246287] env[62816]: DEBUG nova.network.neutron [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Successfully created port: 7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1377.248726] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f342412d-8025-41f0-943b-bfc1a15c4103 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.256810] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19fae02-3731-45a8-a3fd-d3f1b8a6690f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.272219] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1377.275190] env[62816]: DEBUG nova.network.neutron [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1377.376174] env[62816]: DEBUG oslo_vmware.api [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1787887, 'name': PowerOnVM_Task, 'duration_secs': 0.917978} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.376451] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1377.376639] env[62816]: INFO nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Took 15.59 seconds to spawn the instance on the hypervisor. [ 1377.377074] env[62816]: DEBUG nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1377.377600] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f5df3b-0415-47d0-a50a-87e773a0e943 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.457307] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787888, 'name': ReconfigVM_Task, 'duration_secs': 0.623625} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.457694] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd/3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1377.458369] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69a889ad-695e-444b-a51f-0616272af26a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.465777] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1377.465777] env[62816]: value = "task-1787893" [ 1377.465777] env[62816]: _type = "Task" [ 1377.465777] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.475272] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787893, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.574560] env[62816]: INFO nova.compute.manager [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Took 19.20 seconds to build instance. [ 1377.583046] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787891, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.606970] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787892, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.181633} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.606970] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1377.608419] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a75ed8-2b7e-4e01-b23c-7b84547c4802 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.634417] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1377.638047] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f62a478f-e58b-4f93-ae3e-cfc1e10f3847 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.660707] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612962} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.662403] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1377.663298] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1377.663298] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1377.663298] env[62816]: value = "task-1787894" [ 1377.663298] env[62816]: _type = "Task" [ 1377.663298] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.663298] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bf8ece4-dbe8-467b-b5e9-5431799d6a78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.675899] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787894, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.677261] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1377.677261] env[62816]: value = "task-1787895" [ 1377.677261] env[62816]: _type = "Task" [ 1377.677261] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.685582] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787895, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.708581] env[62816]: DEBUG nova.network.neutron [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Updating instance_info_cache with network_info: [{"id": "0797e610-fb6d-45a5-b6f3-5da9fd5eeca8", "address": "fa:16:3e:91:d7:fd", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0797e610-fb", "ovs_interfaceid": "0797e610-fb6d-45a5-b6f3-5da9fd5eeca8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1377.826600] env[62816]: DEBUG nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1377.834702] env[62816]: ERROR nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [req-2cbb9692-5b76-4a55-b761-abe21e044f5e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2cbb9692-5b76-4a55-b761-abe21e044f5e"}]} [ 1377.860253] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1377.860490] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1377.860641] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1377.860817] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1377.860959] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1377.861911] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1377.862187] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1377.862357] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 
tempest-SecurityGroupsTestJSON-1587406018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1377.862892] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1377.862892] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1377.862892] env[62816]: DEBUG nova.virt.hardware [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1377.866320] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fccfb15-a462-479b-b9fb-4b6ba4ead9f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.867570] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1377.877557] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374e52dd-0655-4170-bed8-a3cac7b333b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.905399] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1377.905541] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1377.909072] env[62816]: INFO nova.compute.manager [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Took 22.27 seconds to build instance. [ 1377.954322] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1377.954854] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 19 to 20 during operation: update_aggregates {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1377.977952] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787893, 'name': Rename_Task, 'duration_secs': 0.395869} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.979280] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1377.981879] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1377.982478] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9d0d9ca-f203-4f47-a93b-721aa3fef096 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.992389] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1377.992389] env[62816]: value = "task-1787896" [ 1377.992389] env[62816]: _type = "Task" [ 1377.992389] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.004710] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787896, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.076510] env[62816]: DEBUG oslo_concurrency.lockutils [None req-76bb8f54-3f8b-4493-9bd3-bd33bb923082 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.716s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.084202] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787891, 'name': ReconfigVM_Task, 'duration_secs': 0.950099} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.087507] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 666d5105-ee2e-4691-b13c-bd7feb045959/666d5105-ee2e-4691-b13c-bd7feb045959.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.089235] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2def31b-4cdb-44db-b6fe-3edfb691a92b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.096398] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1378.096398] env[62816]: value = "task-1787897" [ 1378.096398] env[62816]: _type = "Task" [ 1378.096398] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.108038] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787897, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.181561] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787894, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.195665] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787895, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182034} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.196100] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1378.197073] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23d3f9e-2775-47c4-b9f8-1b23a3766c8d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.215155] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "refresh_cache-927badc2-decf-49af-b2c0-d95b471272c9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.215486] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Instance network_info: |[{"id": "0797e610-fb6d-45a5-b6f3-5da9fd5eeca8", "address": "fa:16:3e:91:d7:fd", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0797e610-fb", "ovs_interfaceid": "0797e610-fb6d-45a5-b6f3-5da9fd5eeca8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1378.226854] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1378.235500] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:91:d7:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0797e610-fb6d-45a5-b6f3-5da9fd5eeca8', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1378.243699] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Creating folder: Project (f8179e67e019493a894cd7c67825743c). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.244147] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1519349c-f29f-42b5-b219-99264c85dd73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.262191] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99d02657-cacd-454a-86a2-259f14b84847 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.265647] env[62816]: DEBUG nova.compute.manager [req-59755bd1-782d-4d23-b7df-fbd07296d185 req-1050e35e-673d-491d-b81a-4a34edfe96a3 service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Received event network-vif-plugged-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1378.265647] env[62816]: DEBUG oslo_concurrency.lockutils [req-59755bd1-782d-4d23-b7df-fbd07296d185 req-1050e35e-673d-491d-b81a-4a34edfe96a3 service nova] Acquiring lock "927badc2-decf-49af-b2c0-d95b471272c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.265647] env[62816]: DEBUG oslo_concurrency.lockutils [req-59755bd1-782d-4d23-b7df-fbd07296d185 req-1050e35e-673d-491d-b81a-4a34edfe96a3 service nova] Lock "927badc2-decf-49af-b2c0-d95b471272c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.265647] env[62816]: DEBUG oslo_concurrency.lockutils [req-59755bd1-782d-4d23-b7df-fbd07296d185 req-1050e35e-673d-491d-b81a-4a34edfe96a3 service nova] Lock "927badc2-decf-49af-b2c0-d95b471272c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.265647] env[62816]: DEBUG nova.compute.manager [req-59755bd1-782d-4d23-b7df-fbd07296d185 req-1050e35e-673d-491d-b81a-4a34edfe96a3 service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] No waiting events found dispatching network-vif-plugged-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1378.266018] env[62816]: WARNING nova.compute.manager [req-59755bd1-782d-4d23-b7df-fbd07296d185 req-1050e35e-673d-491d-b81a-4a34edfe96a3 service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Received unexpected event network-vif-plugged-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 for instance with vm_state building and task_state spawning. 
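The entries above show Neutron's network-vif-plugged event arriving while instance 927badc2-decf-49af-b2c0-d95b471272c9 is still building: the compute manager takes the per-instance "<uuid>-events" lock via nova.compute.manager.InstanceEvents.pop_instance_event, finds no registered waiter, and logs the event as unexpected. The following is a minimal illustrative sketch of that pop-or-warn dispatch pattern, not Nova's actual implementation; the EventRegistry class and the expect/dispatch helper names are invented for illustration, and plain threading primitives stand in for Nova's eventlet-based machinery.

    # Illustrative sketch only: a simplified stand-in for the pop-or-warn event
    # dispatch visible in the log (InstanceEvents.pop_instance_event). Names such
    # as EventRegistry, expect and dispatch are assumptions, not Nova APIs.
    import threading

    class EventRegistry:
        """Tracks events an instance is waiting for, keyed by (instance_uuid, event_name)."""

        def __init__(self):
            self._lock = threading.Lock()          # mirrors the "<uuid>-events" lock in the log
            self._waiters = {}                     # (instance_uuid, event_name) -> threading.Event

        def expect(self, instance_uuid, event_name):
            """Register interest before starting work that will emit the event (e.g. plugging a VIF)."""
            waiter = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = waiter
            return waiter

        def dispatch(self, instance_uuid, event_name):
            """Handle an incoming external event such as network-vif-plugged-<port-id>."""
            with self._lock:
                waiter = self._waiters.pop((instance_uuid, event_name), None)
            if waiter is None:
                # Corresponds to the WARNING above: the event arrived before any
                # waiter was registered, so it is reported as unexpected.
                print(f"Received unexpected event {event_name} for instance {instance_uuid}")
                return False
            waiter.set()
            return True

    # Usage sketch: the spawn path registers first, triggers the port binding, then waits.
    registry = EventRegistry()
    waiter = registry.expect("927badc2-decf-49af-b2c0-d95b471272c9",
                             "network-vif-plugged-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8")
    registry.dispatch("927badc2-decf-49af-b2c0-d95b471272c9",
                      "network-vif-plugged-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8")
    waiter.wait(timeout=1)   # returns immediately because the event was already dispatched

In the logged case the order is reversed: the event is dispatched before the spawn path registers a waiter, which is why the entry is only a WARNING rather than an error, and the build continues in vm_state "building", task_state "spawning".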
[ 1378.275238] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1378.275238] env[62816]: value = "task-1787899" [ 1378.275238] env[62816]: _type = "Task" [ 1378.275238] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.283718] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Created folder: Project (f8179e67e019493a894cd7c67825743c) in parent group-v370905. [ 1378.283862] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Creating folder: Instances. Parent ref: group-v370926. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1378.284994] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a65b2304-6f04-4983-820a-f72556ac1a9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.291953] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787899, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.305039] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Created folder: Instances in parent group-v370926. [ 1378.305513] env[62816]: DEBUG oslo.service.loopingcall [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1378.305680] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1378.310344] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d07889ea-6327-4afb-9d9a-7e02a308f398 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.334894] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1378.334894] env[62816]: value = "task-1787901" [ 1378.334894] env[62816]: _type = "Task" [ 1378.334894] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.344828] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787901, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.399326] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bb0a22-76c0-4170-b0ca-43600f2fb5bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.413269] env[62816]: DEBUG oslo_concurrency.lockutils [None req-47219090-db0f-4cc1-8a99-69fe85b01206 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.780s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1378.419251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b936273-97c7-43d2-97c2-f261da678f2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.430144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.430144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.476873] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquiring lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1378.476873] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1378.478438] env[62816]: DEBUG nova.network.neutron [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Successfully updated port: 625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1378.481477] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63510092-ba76-4541-a77a-6f867a798300 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.504746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99c7b00-b608-41ab-b8c2-966697426697 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.555991] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787896, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.555991] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1378.585139] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1378.608621] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787897, 'name': Rename_Task, 'duration_secs': 0.262262} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.608914] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1378.609373] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c6112af-3e2e-4f10-99ca-ca6794c82fbc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.622792] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1378.622792] env[62816]: value = "task-1787902" [ 1378.622792] env[62816]: _type = "Task" [ 1378.622792] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.639070] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.677686] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787894, 'name': ReconfigVM_Task, 'duration_secs': 0.612688} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.677975] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Reconfigured VM instance instance-00000005 to attach disk [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.678624] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-865dc938-07fe-4a99-922f-0195fa5f7a00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.688340] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1378.688340] env[62816]: value = "task-1787903" [ 1378.688340] env[62816]: _type = "Task" [ 1378.688340] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.703175] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787903, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.789938] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787899, 'name': ReconfigVM_Task, 'duration_secs': 0.507309} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.790299] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.791503] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8a65905-080c-43a1-bf7a-ceabb0822284 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.800377] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1378.800377] env[62816]: value = "task-1787904" [ 1378.800377] env[62816]: _type = "Task" [ 1378.800377] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.809663] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787904, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.849589] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787901, 'name': CreateVM_Task, 'duration_secs': 0.417613} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.849868] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1378.850771] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.851736] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.851736] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1378.852086] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c67c8b22-e8c4-43fc-8468-cf5c915792f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.862236] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1378.862236] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52932355-c86c-816e-eeb7-aaf209583dc1" [ 1378.862236] env[62816]: _type = "Task" [ 1378.862236] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.872096] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52932355-c86c-816e-eeb7-aaf209583dc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.926842] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1378.992112] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1378.992268] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquired lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.992425] env[62816]: DEBUG nova.network.neutron [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1379.017736] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787896, 'name': PowerOnVM_Task} progress is 86%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.080651] env[62816]: ERROR nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [req-e9a49fa2-0bd6-4b0a-b078-67587a52c789] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e9a49fa2-0bd6-4b0a-b078-67587a52c789"}]} [ 1379.107037] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1379.121531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.128433] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1379.128819] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1379.144343] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787902, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.145739] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1379.167794] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1379.211486] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787903, 'name': Rename_Task, 'duration_secs': 0.22256} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.211772] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1379.212130] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d5e207d-8fbe-4801-9183-c68e3e9fc0fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.224222] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1379.224222] env[62816]: value = "task-1787905" [ 1379.224222] env[62816]: _type = "Task" [ 1379.224222] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.238607] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787905, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.312244] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787904, 'name': Rename_Task, 'duration_secs': 0.236436} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.312681] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1379.313123] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21c507a8-8a32-4df0-ac73-bd9ed8a9a71b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.322824] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1379.322824] env[62816]: value = "task-1787906" [ 1379.322824] env[62816]: _type = "Task" [ 1379.322824] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.335594] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787906, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.382654] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52932355-c86c-816e-eeb7-aaf209583dc1, 'name': SearchDatastore_Task, 'duration_secs': 0.029989} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.382990] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.383267] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1379.383545] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1379.383766] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1379.383962] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.386955] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-749b3440-5f1b-428f-896e-a7f533cd6c95 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.406435] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.406647] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1379.410523] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69ddc7f1-41b8-427e-a474-17db0a49f5cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.413872] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1379.413872] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529450c6-ef8e-4eeb-0169-0ec02c5f5139" [ 1379.413872] env[62816]: _type = "Task" [ 1379.413872] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.423211] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529450c6-ef8e-4eeb-0169-0ec02c5f5139, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.448861] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.490918] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7029a282-c429-4e43-996c-fbb421d3de95 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.507316] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c853b6-4763-4857-9f72-cba15826125e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.522021] env[62816]: DEBUG oslo_vmware.api [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1787896, 'name': PowerOnVM_Task, 'duration_secs': 1.108243} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.546191] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1379.546524] env[62816]: INFO nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Took 19.95 seconds to spawn the instance on the hypervisor. 
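The repeated "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" lines above are Nova polling a vCenter task handle until it reports completion (the wait_for_task / _poll_task pair in the trace). The stand-alone sketch below only illustrates that polling pattern; it is not the oslo.vmware implementation, and FakeTask is a made-up stand-in for a real vCenter task object.

import time

class FakeTask:
    """Pretends to be a vCenter task that finishes after a few polls."""
    def __init__(self, name):
        self.name = name
        self._progress = 0

    def poll(self):
        self._progress = min(100, self._progress + 33)
        state = "success" if self._progress >= 100 else "running"
        return {"name": self.name, "progress": self._progress, "state": state}

def wait_for_task(task, interval=0.5):
    """Poll until the task reports success; raise if it reports an error."""
    while True:
        info = task.poll()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(f"{info['name']} failed")
        # Mirrors the "progress is N%" lines seen in the log.
        print(f"Task {info['name']} progress is {info['progress']}%")
        time.sleep(interval)

if __name__ == "__main__":
    print(wait_for_task(FakeTask("PowerOnVM_Task"), interval=0.1))

The interval and progress increments are arbitrary here; in the trace the poll period is what produces one progress line every second or so until "completed successfully" is logged.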
[ 1379.546780] env[62816]: DEBUG nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1379.548397] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5f644e-dcb9-437e-b192-02dcaaec39b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.553188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af0d09fe-6f86-4f06-9a6d-fde7b131bb99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.573190] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d529d11-3f9a-4982-8d37-5712ce9333a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.588488] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1379.634089] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787902, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.663542] env[62816]: DEBUG nova.network.neutron [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.733480] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787905, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.832408] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787906, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.927530] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529450c6-ef8e-4eeb-0169-0ec02c5f5139, 'name': SearchDatastore_Task, 'duration_secs': 0.032686} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.927869] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-849afa81-4e59-4857-91a4-83a5a823223f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.934737] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1379.934737] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52382965-535b-987d-7282-a83d8d9cefcb" [ 1379.934737] env[62816]: _type = "Task" [ 1379.934737] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.949596] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52382965-535b-987d-7282-a83d8d9cefcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.085771] env[62816]: INFO nova.compute.manager [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Took 24.75 seconds to build instance. [ 1380.138771] env[62816]: DEBUG oslo_vmware.api [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787902, 'name': PowerOnVM_Task, 'duration_secs': 1.26029} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.139637] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1380.140099] env[62816]: INFO nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Took 16.13 seconds to spawn the instance on the hypervisor. 
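The 409 "placement.concurrent_update" error logged a little earlier, followed by the inventory refresh and the provider generation bump from 21 to 22 recorded just below, is Placement's optimistic-concurrency scheme: every inventory write must carry the provider generation the client last saw, and a stale generation forces a re-read and retry. A minimal sketch of that read-retry loop, with a fake in-memory Placement standing in for the real service (this is not the nova.scheduler.client.report code):

class FakePlacement:
    """Toy stand-in for the Placement API's generation-guarded inventory."""
    def __init__(self):
        self.generation = 21
        self.inventory = {}

    def put_inventory(self, generation, inventory):
        if generation != self.generation:
            # Same shape of failure as the 409 in the log.
            return 409, {"code": "placement.concurrent_update"}
        self.inventory = inventory
        self.generation += 1          # server bumps the generation on success
        return 200, {"generation": self.generation}

def set_inventory(placement, inventory, known_generation, max_retries=3):
    for _ in range(max_retries):
        status, body = placement.put_inventory(known_generation, inventory)
        if status == 200:
            return body["generation"]
        # Conflict: someone else updated the provider; refresh our view and retry.
        known_generation = placement.generation
    raise RuntimeError("could not update inventory after retries")

if __name__ == "__main__":
    pl = FakePlacement()
    stale_generation = pl.generation - 1      # simulate the stale client view
    print(set_inventory(pl, {"VCPU": {"total": 48}}, stale_generation))  # -> 22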
[ 1380.140605] env[62816]: DEBUG nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1380.142542] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2039ee-b057-46ff-8a36-492e179914d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.146717] env[62816]: DEBUG nova.scheduler.client.report [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 21 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1380.147248] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 21 to 22 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1380.147568] env[62816]: DEBUG nova.compute.provider_tree [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.233607] env[62816]: DEBUG oslo_vmware.api [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787905, 'name': PowerOnVM_Task, 'duration_secs': 0.954403} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.233938] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1380.234188] env[62816]: INFO nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Took 11.77 seconds to spawn the instance on the hypervisor. [ 1380.234400] env[62816]: DEBUG nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1380.235569] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4c9e5e-2664-42c9-9a30-4468ef14fe80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.263526] env[62816]: DEBUG nova.network.neutron [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updating instance_info_cache with network_info: [{"id": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "address": "fa:16:3e:32:f6:90", "network": {"id": "6b18a31b-5da2-4068-8d98-3f6d1b0178c3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1012623858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55ac373cda544fdda9b58434d070d395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap625f74d1-1d", "ovs_interfaceid": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1380.336549] env[62816]: DEBUG oslo_vmware.api [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787906, 'name': PowerOnVM_Task, 'duration_secs': 0.89857} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.336549] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1380.337024] env[62816]: INFO nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1380.337024] env[62816]: DEBUG nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1380.337758] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2ccd95-262f-492c-992c-afc17f3c1bf6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.450356] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52382965-535b-987d-7282-a83d8d9cefcb, 'name': SearchDatastore_Task, 'duration_secs': 0.017584} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.450641] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.451171] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 927badc2-decf-49af-b2c0-d95b471272c9/927badc2-decf-49af-b2c0-d95b471272c9.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1380.451299] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a57758e-f180-4041-8919-e8620cab6679 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.460652] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1380.460652] env[62816]: value = "task-1787907" [ 1380.460652] env[62816]: _type = "Task" [ 1380.460652] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.469408] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787907, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.563921] env[62816]: DEBUG nova.network.neutron [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Successfully updated port: 7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.588344] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d792b28-ea58-4ec4-a13f-20cdd425295d tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.262s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.657628] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.873s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1380.658246] env[62816]: DEBUG nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1380.661119] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.540s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.667878] env[62816]: INFO nova.compute.claims [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1380.679158] env[62816]: INFO nova.compute.manager [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Took 24.70 seconds to build instance. [ 1380.754649] env[62816]: INFO nova.compute.manager [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Took 20.33 seconds to build instance. 
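The "Acquiring lock / acquired :: waited / released :: held" triples throughout this trace are oslo.concurrency's per-instance lock bookkeeping: each build serializes on the instance UUID, and the log records how long the caller waited for the lock and how long it held it. Purely as an illustration (stdlib only, not oslo.concurrency itself), the same bookkeeping can be sketched like this:

import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()

@contextmanager
def instance_lock(name, owner):
    """Log wait/hold times around a named lock, in the style of lockutils."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    print(f'Acquiring lock "{name}" by "{owner}"')
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

if __name__ == "__main__":
    with instance_lock("99bd7579-7097-41df-a8c0-e12a3863a3dc",
                       "_locked_do_build_and_run_instance"):
        time.sleep(0.05)   # stand-in for the actual build work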
[ 1380.765825] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Releasing lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1380.765936] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Instance network_info: |[{"id": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "address": "fa:16:3e:32:f6:90", "network": {"id": "6b18a31b-5da2-4068-8d98-3f6d1b0178c3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1012623858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55ac373cda544fdda9b58434d070d395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap625f74d1-1d", "ovs_interfaceid": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1380.766602] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:f6:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '625f74d1-1d6d-4ca8-90f2-5b8327963031', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1380.780538] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Creating folder: Project (55ac373cda544fdda9b58434d070d395). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.782349] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9add6c20-9a6e-4bc2-843e-c3f41db0d346 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.798785] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Created folder: Project (55ac373cda544fdda9b58434d070d395) in parent group-v370905. 
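Just above, the driver reduces the full Neutron network_info blob to the short "Instance VIF info" list it needs before creating the VM. Assuming network_info entries shaped like the JSON dumped in the log, a hedged sketch of that reduction step (field names copied from the log output; not the actual nova.virt.vmwareapi code) could look like:

def vif_info_from_network_info(vif):
    """Reduce one Neutron port entry to the fields needed for a vmxnet3 NIC."""
    details = vif.get("details", {})
    return {
        "network_name": vif["network"]["bridge"],        # e.g. "br-int"
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": "vmxnet3",
    }

if __name__ == "__main__":
    sample = {
        "id": "625f74d1-1d6d-4ca8-90f2-5b8327963031",
        "address": "fa:16:3e:32:f6:90",
        "network": {"bridge": "br-int"},
        "details": {"nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58"},
    }
    print(vif_info_from_network_info(sample))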
[ 1380.799298] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Creating folder: Instances. Parent ref: group-v370929. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1380.799556] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-527bc6c5-0c78-4a16-908a-2340556f059e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.834643] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Created folder: Instances in parent group-v370929. [ 1380.834945] env[62816]: DEBUG oslo.service.loopingcall [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.835157] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.835393] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41431e8e-6a66-45c8-ac6f-61e257583287 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.865344] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.865344] env[62816]: value = "task-1787910" [ 1380.865344] env[62816]: _type = "Task" [ 1380.865344] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.865713] env[62816]: INFO nova.compute.manager [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Took 18.47 seconds to build instance. [ 1380.877694] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787910, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.980813] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787907, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.067349] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.067471] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.067529] env[62816]: DEBUG nova.network.neutron [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1381.093248] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.174787] env[62816]: DEBUG nova.compute.utils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1381.174858] env[62816]: DEBUG nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Not allocating networking since 'none' was specified. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1381.182031] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c771cc90-b625-403c-8fba-7e34807b0bb7 tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "666d5105-ee2e-4691-b13c-bd7feb045959" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.212s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.258220] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2f8162c6-e54b-4669-9b3c-b64f51ad40bd tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.842s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.379319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddaf2510-9fc4-4157-9c4f-77559973341b tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "7be4c8f8-240c-4a71-93bb-aeb94243d781" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.389971] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787910, 'name': CreateVM_Task, 'duration_secs': 0.4892} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.389971] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.390441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.390856] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.391262] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1381.392112] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3083028-2063-44d9-b928-3eb0bb14a43b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.401366] env[62816]: DEBUG 
oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1381.401366] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52badf6c-7d84-f305-c034-d3c49c8ff61a" [ 1381.401366] env[62816]: _type = "Task" [ 1381.401366] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.412763] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52badf6c-7d84-f305-c034-d3c49c8ff61a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.476840] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.883973} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.477118] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 927badc2-decf-49af-b2c0-d95b471272c9/927badc2-decf-49af-b2c0-d95b471272c9.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1381.477326] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1381.477690] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d6b4c065-e3c6-449a-a3af-476fa02fa19d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.487904] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1381.487904] env[62816]: value = "task-1787911" [ 1381.487904] env[62816]: _type = "Task" [ 1381.487904] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.499722] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787911, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.635794] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1381.680468] env[62816]: DEBUG nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1381.685930] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Received event network-changed-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1381.686080] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Refreshing instance network info cache due to event network-changed-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1381.687966] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Acquiring lock "refresh_cache-927badc2-decf-49af-b2c0-d95b471272c9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.687966] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Acquired lock "refresh_cache-927badc2-decf-49af-b2c0-d95b471272c9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.687966] env[62816]: DEBUG nova.network.neutron [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Refreshing network info cache for port 0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1381.696124] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.764816] env[62816]: DEBUG nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.844895] env[62816]: DEBUG nova.network.neutron [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1381.883334] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.923039] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52badf6c-7d84-f305-c034-d3c49c8ff61a, 'name': SearchDatastore_Task, 'duration_secs': 0.020264} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.923939] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.924321] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1381.924683] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.924946] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.925265] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.925637] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f7485f9-2857-402b-93a9-1e456fcb136f {{(pid=62816) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.006284] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787911, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106516} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.006617] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1382.014810] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f70769-c85c-491b-b4cc-0ded95d64479 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.046623] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 927badc2-decf-49af-b2c0-d95b471272c9/927badc2-decf-49af-b2c0-d95b471272c9.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1382.052385] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0c51c3d-c566-4d91-8645-03594d60d63c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.080623] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1382.080623] env[62816]: value = "task-1787912" [ 1382.080623] env[62816]: _type = "Task" [ 1382.080623] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.094745] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787912, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.151492] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415205cb-0f8e-459a-9870-cc9c5152ed72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.162481] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30aafe1-9b93-487d-b74c-39419d53e0b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.204199] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd2a2f9-56e8-403a-af27-21c3f0c844ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.215266] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c488d19-4f67-4d22-ac9a-06a0c936ca9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.229929] env[62816]: DEBUG nova.compute.provider_tree [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1382.237379] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.286861] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.408166] env[62816]: DEBUG nova.compute.manager [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Received event network-changed {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.408166] env[62816]: DEBUG nova.compute.manager [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Refreshing instance network info cache due to event network-changed. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1382.408166] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] Acquiring lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.408166] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] Acquired lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.408166] env[62816]: DEBUG nova.network.neutron [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1382.416886] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.592703] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787912, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.701470] env[62816]: DEBUG nova.network.neutron [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updating instance_info_cache with network_info: [{"id": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "address": "fa:16:3e:c2:e3:73", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b35c8f0-5f", "ovs_interfaceid": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.709390] env[62816]: DEBUG nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1382.740028] env[62816]: DEBUG nova.scheduler.client.report [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1382.749974] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1382.750213] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1382.750324] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1382.750499] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1382.750636] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1382.750774] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1382.750988] env[62816]: DEBUG nova.virt.hardware [None 
req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1382.751216] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1382.751403] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1382.751566] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1382.751734] env[62816]: DEBUG nova.virt.hardware [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1382.753968] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ea2053-c019-40b3-a66c-16c8b21280c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.763523] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271d9e7d-620d-4c63-b333-4e97cb84029a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.783374] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1382.788990] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Creating folder: Project (b0d7c02fe96c465bb48673f3a49f2458). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.789714] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b2df6f4-0702-4c77-af4f-728b668f955b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.802192] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Created folder: Project (b0d7c02fe96c465bb48673f3a49f2458) in parent group-v370905. 
[ 1382.802192] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Creating folder: Instances. Parent ref: group-v370932. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1382.802192] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-231b99a1-44fd-4162-bc05-80d5714348dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.811665] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Created folder: Instances in parent group-v370932. [ 1382.814027] env[62816]: DEBUG oslo.service.loopingcall [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1382.814027] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1382.814027] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1966fd0f-12c9-4206-a8e9-b7aaf3b125b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.835387] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1382.835387] env[62816]: value = "task-1787915" [ 1382.835387] env[62816]: _type = "Task" [ 1382.835387] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.844573] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.098856] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787912, 'name': ReconfigVM_Task, 'duration_secs': 0.887907} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.100030] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 927badc2-decf-49af-b2c0-d95b471272c9/927badc2-decf-49af-b2c0-d95b471272c9.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1383.100229] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7b03044b-a079-455e-9370-c1f95a20a97d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.109284] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1383.109284] env[62816]: value = "task-1787916" [ 1383.109284] env[62816]: _type = "Task" [ 1383.109284] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.119992] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787916, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.201664] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.202751] env[62816]: DEBUG nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Instance network_info: |[{"id": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "address": "fa:16:3e:c2:e3:73", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b35c8f0-5f", "ovs_interfaceid": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1383.202910] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:e3:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b35c8f0-5f21-4920-93b4-f88823b815ab', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1383.213835] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Creating folder: Project (2c6b942889914783a95c2abb080137a9). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.214874] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-391f07f6-5945-46f7-9494-5793e726bf96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.225473] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Created folder: Project (2c6b942889914783a95c2abb080137a9) in parent group-v370905. [ 1383.225671] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Creating folder: Instances. Parent ref: group-v370935. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1383.227035] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fa1b4d82-488e-4df3-8b92-f578b733aa77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.236119] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Created folder: Instances in parent group-v370935. [ 1383.236119] env[62816]: DEBUG oslo.service.loopingcall [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1383.236263] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1383.236971] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee0d184d-b191-43a0-a721-90d358c2dd6c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.257137] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.260561] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1383.260958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.812s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.262670] env[62816]: INFO nova.compute.claims [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1383.272101] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1383.272101] env[62816]: value = "task-1787919" [ 1383.272101] env[62816]: _type = "Task" [ 1383.272101] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.280331] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787919, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.281729] env[62816]: DEBUG nova.network.neutron [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Updated VIF entry in instance network info cache for port 0797e610-fb6d-45a5-b6f3-5da9fd5eeca8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1383.282273] env[62816]: DEBUG nova.network.neutron [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Updating instance_info_cache with network_info: [{"id": "0797e610-fb6d-45a5-b6f3-5da9fd5eeca8", "address": "fa:16:3e:91:d7:fd", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0797e610-fb", "ovs_interfaceid": "0797e610-fb6d-45a5-b6f3-5da9fd5eeca8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.351302] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.625207] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787916, 'name': Rename_Task, 'duration_secs': 0.290075} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.626034] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1383.626034] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c53c4bfd-8683-4302-b33c-65340b1ee8e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.631567] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1383.631567] env[62816]: value = "task-1787920" [ 1383.631567] env[62816]: _type = "Task" [ 1383.631567] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.641171] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787920, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.773694] env[62816]: DEBUG nova.compute.utils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1383.776400] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1383.776536] env[62816]: DEBUG nova.network.neutron [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1383.789715] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Releasing lock "refresh_cache-927badc2-decf-49af-b2c0-d95b471272c9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.789981] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Received event network-vif-plugged-625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.790226] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.790444] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1383.790590] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1383.790760] env[62816]: DEBUG nova.compute.manager 
[req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] No waiting events found dispatching network-vif-plugged-625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1383.790926] env[62816]: WARNING nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Received unexpected event network-vif-plugged-625f74d1-1d6d-4ca8-90f2-5b8327963031 for instance with vm_state building and task_state spawning. [ 1383.791104] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Received event network-changed-625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1383.791292] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Refreshing instance network info cache due to event network-changed-625f74d1-1d6d-4ca8-90f2-5b8327963031. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1383.791470] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Acquiring lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1383.791676] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Acquired lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1383.791749] env[62816]: DEBUG nova.network.neutron [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Refreshing network info cache for port 625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1383.799560] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787919, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.848985] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.048327] env[62816]: DEBUG nova.network.neutron [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Updating instance_info_cache with network_info: [{"id": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "address": "fa:16:3e:16:82:4e", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.72", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19d501a7-a6", "ovs_interfaceid": "19d501a7-a6e8-4c20-95c7-6546ec0a4bba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.091368] env[62816]: DEBUG nova.policy [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c624faef55d44cd8c4871ac08954840', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '934fdecf54c6435999885451fc2204ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1384.142446] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787920, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.278297] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1384.297889] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787919, 'name': CreateVM_Task, 'duration_secs': 0.638373} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.299372] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1384.299858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.300061] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.300366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1384.303977] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f366f5c0-efa6-4ca2-9c65-48a33c7a2d62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.312818] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1384.312818] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522083a2-b2bf-1189-f9aa-5094072fb581" [ 1384.312818] env[62816]: _type = "Task" [ 1384.312818] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.333157] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522083a2-b2bf-1189-f9aa-5094072fb581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.351686] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.551735] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f804660e-9ac8-4c7d-b183-39fb10c177c0 tempest-ServerExternalEventsTest-1643110373 tempest-ServerExternalEventsTest-1643110373-project] Releasing lock "refresh_cache-666d5105-ee2e-4691-b13c-bd7feb045959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1384.634747] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84ea853-f365-42a4-9185-37a96d8ff88f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.648467] env[62816]: DEBUG oslo_vmware.api [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1787920, 'name': PowerOnVM_Task, 'duration_secs': 0.710066} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.650422] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5c3265-7629-4a8e-b91e-f2382dbfe320 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.653670] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1384.654066] env[62816]: INFO nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Took 11.51 seconds to spawn the instance on the hypervisor. 
[ 1384.654154] env[62816]: DEBUG nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1384.654965] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a84cd558-8da9-41b6-9551-5e118e971d8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.697179] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f793f4-bb6a-470f-8376-36fb813288d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.706017] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c987ba82-5249-4fb6-924f-0a7640f210b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.720745] env[62816]: DEBUG nova.compute.provider_tree [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1384.823068] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522083a2-b2bf-1189-f9aa-5094072fb581, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.851343] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.117345] env[62816]: DEBUG nova.network.neutron [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updated VIF entry in instance network info cache for port 625f74d1-1d6d-4ca8-90f2-5b8327963031. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1385.118128] env[62816]: DEBUG nova.network.neutron [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updating instance_info_cache with network_info: [{"id": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "address": "fa:16:3e:32:f6:90", "network": {"id": "6b18a31b-5da2-4068-8d98-3f6d1b0178c3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1012623858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55ac373cda544fdda9b58434d070d395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap625f74d1-1d", "ovs_interfaceid": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1385.168716] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.169097] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.204619] env[62816]: INFO nova.compute.manager [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Took 17.73 seconds to build instance. 
[ 1385.223819] env[62816]: DEBUG nova.scheduler.client.report [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1385.263151] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "666d5105-ee2e-4691-b13c-bd7feb045959" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.263415] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "666d5105-ee2e-4691-b13c-bd7feb045959" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.263628] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "666d5105-ee2e-4691-b13c-bd7feb045959-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.263811] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "666d5105-ee2e-4691-b13c-bd7feb045959-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.264036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "666d5105-ee2e-4691-b13c-bd7feb045959-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.266521] env[62816]: INFO nova.compute.manager [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Terminating instance [ 1385.270806] env[62816]: DEBUG nova.compute.manager [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 
tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1385.270806] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1385.270806] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702aaef6-e333-482d-aa90-e996c1b2651f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.280055] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1385.280055] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63f2d5d2-b0bd-4591-958c-28a930977a45 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.290282] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1385.290282] env[62816]: value = "task-1787921" [ 1385.290282] env[62816]: _type = "Task" [ 1385.290282] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.291396] env[62816]: DEBUG nova.network.neutron [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Successfully created port: de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1385.294316] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1385.309136] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787921, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.334817] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522083a2-b2bf-1189-f9aa-5094072fb581, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.341475] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:50:19Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1007996031',id=31,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-562786499',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1385.341475] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1385.341475] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1385.342021] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1385.342021] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1385.342021] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1385.342021] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1385.342021] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 
tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1385.342228] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1385.342228] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1385.342228] env[62816]: DEBUG nova.virt.hardware [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1385.342228] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1034a25c-3b96-40aa-a424-709576eb2471 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.362604] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.366431] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f7511b-1db6-4e3c-9934-42645aad7ec9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.453299] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1385.453593] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1385.454456] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-167142d5-a776-4e7f-a9ea-a3178e578e14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.461621] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1385.461621] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529e7b0c-1b74-e374-8a6a-05f602cd9f90" [ 1385.461621] env[62816]: _type = "Task" [ 1385.461621] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.473517] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529e7b0c-1b74-e374-8a6a-05f602cd9f90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.626254] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Releasing lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.626664] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Received event network-vif-plugged-7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1385.626911] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Acquiring lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.627196] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.627425] env[62816]: DEBUG oslo_concurrency.lockutils [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.627635] env[62816]: DEBUG nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] No waiting events found dispatching network-vif-plugged-7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1385.627840] env[62816]: WARNING nova.compute.manager [req-f55a7b65-98b0-4cbc-9f07-c111fc84cf95 req-98dda42f-40c8-456c-af5e-b19c5e7b2afc service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Received unexpected event network-vif-plugged-7b35c8f0-5f21-4920-93b4-f88823b815ab for instance with vm_state building and task_state spawning. 
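The nova.virt.hardware lines above walk the 1-vCPU flavor through CPU topology selection: neither the flavor nor the image constrains sockets/cores/threads, so the limits default to 65536 each and the only candidate is VirtCPUTopology(cores=1,sockets=1,threads=1). A simplified, illustrative enumeration of that step (not the Nova implementation itself):

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) factorization of the vCPU
        # count that stays within the per-dimension maximums.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            for cores in range(1, min(vcpus // sockets, max_cores) + 1):
                if (vcpus // sockets) % cores:
                    continue
                threads = vcpus // (sockets * cores)
                if threads <= max_threads:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the 1-vCPU flavor in the log this yields a single topology, matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_topologies(1))
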
[ 1385.709307] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8d0a84d-b8c6-4d03-ac43-67baa5600459 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "927badc2-decf-49af-b2c0-d95b471272c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.251s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.729794] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.469s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.730498] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1385.733742] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.099s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.735199] env[62816]: INFO nova.compute.claims [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1385.767447] env[62816]: INFO nova.compute.manager [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Rebuilding instance [ 1385.805750] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787921, 'name': PowerOffVM_Task, 'duration_secs': 0.463468} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.806399] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1385.806658] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1385.806926] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1af7b313-e0d1-44c7-a017-7d53018d0b7c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.819565] env[62816]: DEBUG nova.compute.manager [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1385.820479] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19f5b07-88d5-4616-9f51-4c09e3309e3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.834553] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522083a2-b2bf-1189-f9aa-5094072fb581, 'name': SearchDatastore_Task, 'duration_secs': 1.155773} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.836349] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.836593] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1385.836805] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1385.853560] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.921598] env[62816]: INFO nova.compute.manager [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Rebuilding instance [ 1385.975729] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529e7b0c-1b74-e374-8a6a-05f602cd9f90, 'name': SearchDatastore_Task, 'duration_secs': 0.020096} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.980056] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-967c8d83-ba18-49ec-917d-f290ce30a2a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.983325] env[62816]: DEBUG nova.compute.manager [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1385.984358] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5c5fd3-becd-4c78-9737-ee4dc5a2ef09 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.990913] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1385.990913] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527efaab-3e8d-1219-b1c3-dee917575961" [ 1385.990913] env[62816]: _type = "Task" [ 1385.990913] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.006212] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527efaab-3e8d-1219-b1c3-dee917575961, 'name': SearchDatastore_Task, 'duration_secs': 0.010199} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.006608] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.007033] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0b10aca0-950b-46f6-8367-5cb9ea7540c8/0b10aca0-950b-46f6-8367-5cb9ea7540c8.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.007558] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.007834] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1386.008161] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f361afce-5a5e-48ca-85f1-799a670cb91b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.010924] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1dd2fff6-085b-4507-9762-aa4f94f950d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.019303] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1386.019303] env[62816]: value = "task-1787923" [ 1386.019303] env[62816]: _type = "Task" [ 1386.019303] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.020502] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1386.020679] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1386.026325] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-935c4a48-4b58-4593-85a3-96b1fa7701d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.035183] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787923, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.036758] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1386.036758] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527f2f06-6d7a-1e78-7d4f-03f7e798fe3d" [ 1386.036758] env[62816]: _type = "Task" [ 1386.036758] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.048244] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527f2f06-6d7a-1e78-7d4f-03f7e798fe3d, 'name': SearchDatastore_Task, 'duration_secs': 0.009927} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.049181] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-284cfb95-7e74-44d3-a3dc-f7068ea85fb7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.055296] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1386.055296] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5219265e-d5ad-48fd-28a7-b3b8b7bca780" [ 1386.055296] env[62816]: _type = "Task" [ 1386.055296] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.067439] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5219265e-d5ad-48fd-28a7-b3b8b7bca780, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.112599] env[62816]: DEBUG nova.compute.manager [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1386.212320] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1386.241930] env[62816]: DEBUG nova.compute.utils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1386.245317] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1386.245317] env[62816]: DEBUG nova.network.neutron [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1386.328625] env[62816]: DEBUG nova.compute.manager [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Received event network-changed-7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1386.329435] env[62816]: DEBUG nova.compute.manager [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Refreshing instance network info cache due to event network-changed-7b35c8f0-5f21-4920-93b4-f88823b815ab. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1386.330152] env[62816]: DEBUG oslo_concurrency.lockutils [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] Acquiring lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.330152] env[62816]: DEBUG oslo_concurrency.lockutils [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] Acquired lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.330152] env[62816]: DEBUG nova.network.neutron [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Refreshing network info cache for port 7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1386.342652] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1386.343218] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e17ac9a-cc03-4b89-b6c1-936b384698ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.356061] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1386.356061] env[62816]: value = "task-1787924" [ 1386.356061] env[62816]: _type = "Task" [ 1386.356061] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.365266] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.374077] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787924, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.501813] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1386.502133] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72385b78-659c-4520-aea6-404a3677cd1d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.517565] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1386.517565] env[62816]: value = "task-1787925" [ 1386.517565] env[62816]: _type = "Task" [ 1386.517565] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.532380] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1386.532682] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1386.532925] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Deleting the datastore file [datastore1] 666d5105-ee2e-4691-b13c-bd7feb045959 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.540154] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c5e1b2cd-ca79-4d0a-a067-b582a3d3d0d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.546124] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787925, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.552989] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787923, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.554471] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for the task: (returnval){ [ 1386.554471] env[62816]: value = "task-1787926" [ 1386.554471] env[62816]: _type = "Task" [ 1386.554471] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.566015] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.569886] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5219265e-d5ad-48fd-28a7-b3b8b7bca780, 'name': SearchDatastore_Task, 'duration_secs': 0.009686} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.570189] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.570451] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 11a4d835-c149-49f0-8e4f-b3f9a7f1afca/11a4d835-c149-49f0-8e4f-b3f9a7f1afca.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1386.570716] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dd66f045-2f48-42bd-bb69-f22d2c8d852b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.579859] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1386.579859] env[62816]: value = "task-1787927" [ 1386.579859] env[62816]: _type = "Task" [ 1386.579859] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.588912] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787927, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.622026] env[62816]: DEBUG nova.policy [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c66397a9f74a45aa83e1b34aa57d4ffc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6ace0e7bd9da4b57b3a05cd6d1b86dfb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1386.640016] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.748559] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.749719] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1386.860532] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787915, 'name': CreateVM_Task, 'duration_secs': 3.655801} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.866227] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1386.871578] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1386.871723] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1386.871979] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1386.873404] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a22696f-838c-45f7-befc-d7c5c0bc0296 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.887477] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787924, 'name': PowerOffVM_Task, 'duration_secs': 0.158524} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.888415] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1386.888570] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1386.892055] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce6dbc2-29b0-4a13-ab74-336bbe1984a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.903324] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1386.903324] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f82e76-2329-9a49-de53-3892b15ecf83" [ 1386.903324] env[62816]: _type = "Task" [ 1386.903324] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.914172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1386.915175] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-37076718-5419-4a04-8155-143ef1bd068d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.920936] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f82e76-2329-9a49-de53-3892b15ecf83, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.948294] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1386.948631] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1386.949783] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Deleting the datastore file [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1386.949783] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24545849-7aaa-4908-a624-caece6a85054 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.974954] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.975132] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.977788] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1386.977788] env[62816]: value = "task-1787929" [ 1386.977788] env[62816]: _type = "Task" [ 1386.977788] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.009804] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787929, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.034539] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787925, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.054946] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787923, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.067236] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787926, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.101041] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.314890] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a7f671-5067-47fe-a68f-28c874f10b3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.327704] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deadb487-897e-4f47-91c5-9e8b810e5e11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.365409] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c28cac2-721c-4d7a-8905-66fc254b25d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.373858] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b5aa360-7eeb-48e5-9168-bff67cf0d227 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.391570] env[62816]: DEBUG nova.compute.provider_tree [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.418019] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f82e76-2329-9a49-de53-3892b15ecf83, 'name': SearchDatastore_Task, 'duration_secs': 0.201721} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.418019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.418019] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1387.418019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.418246] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.418246] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1387.418246] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b8fe810-2550-4b4d-8383-dee68ef3b917 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.426682] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1387.426862] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Folder [datastore1] devstack-image-cache_base created. 
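The Acquiring/Acquired/Releasing entries around the lock named "[datastore1] devstack-image-cache_base/844838ed-...vmdk" show concurrent spawns of the same image being serialized while the cache entry is checked and populated. A minimal sketch of the underlying oslo.concurrency primitive follows; populate_cache_entry() is a hypothetical stand-in for the real search/copy work, not a Nova function.

from oslo_concurrency import lockutils

CACHE_VMDK = ('[datastore1] devstack-image-cache_base/'
              '844838ed-b150-482e-a0f6-dcce37470b52/'
              '844838ed-b150-482e-a0f6-dcce37470b52.vmdk')

def populate_cache_entry():
    pass  # hypothetical stand-in for the datastore search / copy work

def fetch_image_if_missing():
    # The "Acquiring" / "Acquired" / "Releasing lock" entries above map onto this
    # context manager: concurrent spawns of the same image serialize here, so only
    # one request populates the cache entry while the others wait.
    with lockutils.lock(CACHE_VMDK):
        populate_cache_entry()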
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1387.427609] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eeb4b9cd-40b2-4605-b881-6cc1258a5afc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.435358] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1387.435358] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5227065f-4b2c-14dc-3ae8-f983bee42cf8" [ 1387.435358] env[62816]: _type = "Task" [ 1387.435358] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.444295] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5227065f-4b2c-14dc-3ae8-f983bee42cf8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.491691] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35315} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.491956] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1387.492160] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1387.492339] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1387.510889] env[62816]: DEBUG nova.network.neutron [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updated VIF entry in instance network info cache for port 7b35c8f0-5f21-4920-93b4-f88823b815ab. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1387.511341] env[62816]: DEBUG nova.network.neutron [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updating instance_info_cache with network_info: [{"id": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "address": "fa:16:3e:c2:e3:73", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b35c8f0-5f", "ovs_interfaceid": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1387.529845] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787925, 'name': PowerOffVM_Task, 'duration_secs': 0.861633} completed successfully. 
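The instance_info_cache update above embeds the full Neutron VIF model. The following self-contained snippet (illustrative only, reusing values copied from that entry) shows where the fixed IP, device name and MTU sit inside that structure:

# Illustrative only; the values are copied from the cache update above.
vif = {
    "id": "7b35c8f0-5f21-4920-93b4-f88823b815ab",
    "address": "fa:16:3e:c2:e3:73",
    "devname": "tap7b35c8f0-5f",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["devname"], fixed_ips, vif["network"]["meta"]["mtu"])
# -> tap7b35c8f0-5f ['192.168.128.4'] 8950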
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.530217] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1387.536944] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1387.536944] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5593f777-eb34-4fd7-9024-cca8dc1b0f56 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.548031] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1387.552499] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51d1aaab-1d4f-4bcd-8359-c6ca087bf94f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.554857] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787923, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.177848} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.554857] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0b10aca0-950b-46f6-8367-5cb9ea7540c8/0b10aca0-950b-46f6-8367-5cb9ea7540c8.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1387.555119] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1387.556101] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c926e85-0c56-4f34-bc33-ee945af127e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.569151] env[62816]: DEBUG oslo_vmware.api [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Task: {'id': task-1787926, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.004301} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.571121] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1387.571377] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1387.571565] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1387.571744] env[62816]: INFO nova.compute.manager [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Took 2.30 seconds to destroy the instance on the hypervisor. [ 1387.572010] env[62816]: DEBUG oslo.service.loopingcall [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
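The "Waiting for function ... _deallocate_network_with_retries to return" entry comes from oslo.service's looping-call machinery, which Nova uses to retry network deallocation until it succeeds. A generic sketch of that primitive follows, using the fixed-interval variant; _work_is_done() is a stand-in for the real completion check and is not part of Nova.

from oslo_service import loopingcall

def _work_is_done():
    return True  # stand-in for the real completion check

def _poll():
    # Raising LoopingCallDone stops the loop and hands back a return value.
    if _work_is_done():
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_poll)
# start() returns an event; wait() blocks until LoopingCallDone is raised, which
# is the wait reflected by the "Waiting for function ... to return" entry.
result = timer.start(interval=1.0).wait()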
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1387.572436] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1387.572436] env[62816]: value = "task-1787931" [ 1387.572436] env[62816]: _type = "Task" [ 1387.572436] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.572601] env[62816]: DEBUG nova.compute.manager [-] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1387.572638] env[62816]: DEBUG nova.network.neutron [-] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1387.579444] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1387.579886] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1387.580111] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleting the datastore file [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1387.581484] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61aacbbe-68ad-41f2-8afd-f673bac42ef0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.586800] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787931, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.595818] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787927, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.597254] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1387.597254] env[62816]: value = "task-1787932" [ 1387.597254] env[62816]: _type = "Task" [ 1387.597254] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.608271] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.765580] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1387.788838] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1387.789134] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1387.789323] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1387.789517] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1387.789661] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1387.789808] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1387.790050] env[62816]: DEBUG nova.virt.hardware [None 
req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1387.790225] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1387.790398] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1387.790560] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1387.790732] env[62816]: DEBUG nova.virt.hardware [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1387.791653] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da05f78f-ebc9-4860-99f7-b78fd3bf0af6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.801824] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a6d2ea-f310-4092-8e11-1ce42e773a4b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.895517] env[62816]: DEBUG nova.scheduler.client.report [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1387.946362] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5227065f-4b2c-14dc-3ae8-f983bee42cf8, 'name': SearchDatastore_Task, 'duration_secs': 0.262732} completed successfully. 
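The inventory record above is what the resource tracker reports to Placement. As a quick sanity check on those numbers, capacity as Placement normally treats it is (total - reserved) * allocation_ratio, which for this provider works out to 192 schedulable VCPUs, 196078 MB of RAM and 400 GB of disk:

# Worked numbers from the inventory record above, using the usual Placement rule
# capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0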
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.949652] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae57f964-6528-45ce-8fd5-d5e4d1c78c48 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.955183] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1387.955183] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52891284-a0d8-f651-4420-9e1f4de7c3cf" [ 1387.955183] env[62816]: _type = "Task" [ 1387.955183] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.966564] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52891284-a0d8-f651-4420-9e1f4de7c3cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.017366] env[62816]: DEBUG oslo_concurrency.lockutils [req-2693652b-29fe-47c5-879b-db697a6fbeab req-e2a9e733-e54c-41d8-b174-52eabc362316 service nova] Releasing lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.085972] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070902} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.087368] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1388.092374] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8db0d6-c9a6-4073-971d-60947c8d6e4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.118259] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787927, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.130981] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 0b10aca0-950b-46f6-8367-5cb9ea7540c8/0b10aca0-950b-46f6-8367-5cb9ea7540c8.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1388.134086] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81cb6810-1214-4f65-88c2-7bef1614cbd7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.152810] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787932, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.158798] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1388.158798] env[62816]: value = "task-1787933" [ 1388.158798] env[62816]: _type = "Task" [ 1388.158798] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.166427] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787933, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.206037] env[62816]: DEBUG nova.network.neutron [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Successfully created port: c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1388.402229] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.402819] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Start building networks asynchronously for instance. 
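The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task sequence above is the sparse-image spawn path: copy the cached vmdk into the instance directory, grow it to the flavor's root size, then attach it to the VM. Below is a rough sketch of the disk-side calls through oslo.vmware, assuming an existing session and a resolved datacenter moref are passed in; the reconfigure/attach step is only noted in a comment because its device-spec construction is omitted here.

def copy_and_extend_root_disk(session, dc_ref):
    """Sketch of the disk-side spawn calls; not the actual Nova implementation."""
    disk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore1] devstack-image-cache_base/'
           '844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk')
    dst = ('[datastore1] 0b10aca0-950b-46f6-8367-5cb9ea7540c8/'
           '0b10aca0-950b-46f6-8367-5cb9ea7540c8.vmdk')

    # 1. Copy the cached image vmdk into the instance directory.
    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # 2. Grow the copy to the flavor root size (1 GiB == 1048576 KB in the log).
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
                                     name=dst, datacenter=dc_ref,
                                     newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)
    # 3. The ReconfigVM_Task entries that follow attach dst to the VM; that step
    #    needs a VirtualDeviceConfigSpec and is left out of this sketch.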
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1388.405378] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.168s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.407481] env[62816]: INFO nova.compute.claims [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.468368] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52891284-a0d8-f651-4420-9e1f4de7c3cf, 'name': SearchDatastore_Task, 'duration_secs': 0.126046} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.468633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.468918] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1388.469156] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa451413-5d93-45a2-b19e-0e9acdafb49d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.476547] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1388.476547] env[62816]: value = "task-1787934" [ 1388.476547] env[62816]: _type = "Task" [ 1388.476547] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.485902] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.533817] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1388.534275] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1388.534517] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1388.534719] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1388.534870] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1388.535037] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1388.535286] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1388.535406] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1388.535576] env[62816]: DEBUG nova.virt.hardware [None 
req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1388.535739] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1388.535910] env[62816]: DEBUG nova.virt.hardware [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1388.537595] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26a0ca6-878d-4002-81a3-f22b5f0b1812 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.548182] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8a0360-6b64-4f9b-9709-9b8cf03c8ae3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.570052] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1388.582030] env[62816]: DEBUG oslo.service.loopingcall [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1388.582469] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1388.582800] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e7ffc94-b3f7-4400-977e-3647e91bc690 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.627765] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787927, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.738203} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.634464] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 11a4d835-c149-49f0-8e4f-b3f9a7f1afca/11a4d835-c149-49f0-8e4f-b3f9a7f1afca.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1388.634861] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1388.635313] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.774298} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.635591] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1388.635591] env[62816]: value = "task-1787935" [ 1388.635591] env[62816]: _type = "Task" [ 1388.635591] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.635864] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd66bedd-f327-48fe-84ce-a03a1b7eccf8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.638871] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1388.639299] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1388.639696] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1388.658014] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787935, 'name': CreateVM_Task} progress is 15%. 
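The Folder.CreateVM_Task invocation and the CreateVM_Task progress entries above are the VM registration/creation step. A small sketch of that call, assuming the caller has already resolved the VM folder and resource pool morefs and built the config spec (all three are passed in rather than constructed here):

def create_vm(session, vm_folder_ref, res_pool_ref, config_spec):
    """Sketch: create a VM given pre-resolved references and a pre-built spec."""
    # Folder.CreateVM_Task is asynchronous; the CreateVM_Task progress entries
    # above come from polling the returned task object.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                              config=config_spec, pool=res_pool_ref)
    task_info = session.wait_for_task(task)
    # On success the task result is the new VirtualMachine managed object ref.
    return task_info.result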
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.660783] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1388.660783] env[62816]: value = "task-1787936" [ 1388.660783] env[62816]: _type = "Task" [ 1388.660783] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.681470] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787933, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.687238] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787936, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.720407] env[62816]: DEBUG nova.network.neutron [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Successfully updated port: de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1388.723475] env[62816]: DEBUG nova.network.neutron [-] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.772622] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "914b187f-b05f-49d4-bf61-d536ef61934d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.772838] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "914b187f-b05f-49d4-bf61-d536ef61934d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.912640] env[62816]: DEBUG nova.compute.utils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1388.918444] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Allocating IP information in the background. 
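The "Successfully created port" entry above is the Neutron side of allocate_for_instance(). For comparison only, here is a minimal openstacksdk sketch of creating a port by hand; the cloud name is a placeholder, the network id is simply one that appears earlier in this log, and this is not the code path Nova itself uses.

import openstack

# Hypothetical clouds.yaml entry; not the code path Nova uses internally.
conn = openstack.connect(cloud='devstack')
port = conn.network.create_port(
    network_id='3441fd9d-c039-4568-be50-67b60a32e449',  # a network id seen earlier in this log
    device_owner='compute:nova')
print(port.id, port.fixed_ips)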
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1388.918648] env[62816]: DEBUG nova.network.neutron [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1388.990950] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787934, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.081342] env[62816]: DEBUG nova.policy [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a3267ab64e4640bf00a0e5dbaaf044', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d830983a3c14168b8f0b67478f27589', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1389.163535] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787935, 'name': CreateVM_Task, 'duration_secs': 0.343331} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.168668] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1389.170672] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.170672] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.170672] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1389.174327] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81c14597-fa7b-4296-95a2-420f003463b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.181102] env[62816]: DEBUG oslo_vmware.api [None 
req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787933, 'name': ReconfigVM_Task, 'duration_secs': 0.536} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.182580] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 0b10aca0-950b-46f6-8367-5cb9ea7540c8/0b10aca0-950b-46f6-8367-5cb9ea7540c8.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1389.183627] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ad57eab-5d93-402b-b726-72636699ebb6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.193830] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787936, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083565} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.194137] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1389.194137] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52bcd7c6-8d91-0fca-d2c2-f93f831417f1" [ 1389.194137] env[62816]: _type = "Task" [ 1389.194137] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.194879] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1389.195735] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef855c3-cca3-4206-b1f4-fc51f0b37adc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.203434] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1389.203434] env[62816]: value = "task-1787937" [ 1389.203434] env[62816]: _type = "Task" [ 1389.203434] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.222683] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bcd7c6-8d91-0fca-d2c2-f93f831417f1, 'name': SearchDatastore_Task, 'duration_secs': 0.011978} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.231383] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 11a4d835-c149-49f0-8e4f-b3f9a7f1afca/11a4d835-c149-49f0-8e4f-b3f9a7f1afca.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1389.233026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.233026] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1389.233267] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.233405] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.233578] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1389.234160] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.234269] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.234421] env[62816]: DEBUG nova.network.neutron [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1389.235571] env[62816]: INFO nova.compute.manager [-] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Took 1.66 seconds to deallocate network for instance. [ 1389.238706] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-034c7d66-241d-4249-8b81-376bdd654f65 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.255872] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b666d553-ee51-4141-a086-9d6500cd39ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.259881] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787937, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.267011] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1389.267011] env[62816]: value = "task-1787938" [ 1389.267011] env[62816]: _type = "Task" [ 1389.267011] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.270974] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1389.270974] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1389.271425] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55d25558-188e-42c1-8fa0-f99a4dc238f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.277102] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787938, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.283613] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1389.283613] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525bbfee-18ff-189f-8529-b6121e8829fa" [ 1389.283613] env[62816]: _type = "Task" [ 1389.283613] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.291840] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525bbfee-18ff-189f-8529-b6121e8829fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.344200] env[62816]: DEBUG nova.network.neutron [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1389.425901] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1389.493821] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.729332} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.494111] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.494444] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.495396] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f40baece-0628-48a3-810a-3efd8b054666 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.504249] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1389.504249] env[62816]: value = "task-1787939" [ 1389.504249] env[62816]: _type = "Task" [ 1389.504249] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.512478] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787939, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.703399] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1389.703702] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1389.703857] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1389.704051] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1389.704199] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1389.704354] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1389.704562] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1389.704716] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1389.704915] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f 
tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1389.705611] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1389.705611] env[62816]: DEBUG nova.virt.hardware [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1389.706469] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b699d0f1-01b8-4905-aab8-28d839f30eba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.730866] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787937, 'name': Rename_Task, 'duration_secs': 0.275701} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.732245] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a773b0e-83d6-4e86-85ca-e4b81e8d42a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.737372] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1389.740924] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb755746-ff61-4919-9685-3509f6161b6c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.755744] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1389.762832] env[62816]: DEBUG oslo.service.loopingcall [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1389.766919] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1389.767901] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.768287] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1389.768287] env[62816]: value = "task-1787940" [ 1389.768287] env[62816]: _type = "Task" [ 1389.768287] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.768730] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b55ecc01-e17f-4682-a283-7162979620be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.785353] env[62816]: DEBUG nova.network.neutron [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Successfully created port: 4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1389.788308] env[62816]: DEBUG nova.network.neutron [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updating instance_info_cache with network_info: [{"id": "de736438-152f-4337-ae73-74024c1cac15", "address": "fa:16:3e:af:7e:d2", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde736438-15", "ovs_interfaceid": "de736438-152f-4337-ae73-74024c1cac15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1389.805826] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1389.805826] 
env[62816]: value = "task-1787941" [ 1389.805826] env[62816]: _type = "Task" [ 1389.805826] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.809856] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787938, 'name': ReconfigVM_Task, 'duration_secs': 0.277055} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.818869] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 11a4d835-c149-49f0-8e4f-b3f9a7f1afca/11a4d835-c149-49f0-8e4f-b3f9a7f1afca.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1389.819648] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787940, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.824167] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e004a4aa-ee68-481b-8132-fbb2d839d898 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.830741] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525bbfee-18ff-189f-8529-b6121e8829fa, 'name': SearchDatastore_Task, 'duration_secs': 0.009808} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.835633] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a01f09e-a668-4f92-a164-fcfa98cf7df7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.838338] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787941, 'name': CreateVM_Task} progress is 15%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.839814] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1389.839814] env[62816]: value = "task-1787942" [ 1389.839814] env[62816]: _type = "Task" [ 1389.839814] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.848359] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1389.848359] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a1476e-a07e-ebdb-0c5f-d6cdd0388abd" [ 1389.848359] env[62816]: _type = "Task" [ 1389.848359] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.852449] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787942, 'name': Rename_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.862226] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a1476e-a07e-ebdb-0c5f-d6cdd0388abd, 'name': SearchDatastore_Task, 'duration_secs': 0.011789} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.866016] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.866016] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1389.866016] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b72d401-1f7f-4ea9-8f82-4d0b4497d1ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.867603] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0490aab6-6809-4b67-a0b4-e1eaeaee6f2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.877103] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232e4aca-31ea-4664-bceb-068a06a0d74f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.880466] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1389.880466] env[62816]: value = "task-1787943" [ 1389.880466] env[62816]: _type = "Task" [ 
1389.880466] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.912268] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a9620b-4443-4e52-bccc-0fbb6f9c901d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.918247] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787943, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.923333] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44e1c0da-ef8c-42bf-8530-1f52ca56dca6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.941442] env[62816]: DEBUG nova.compute.provider_tree [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.015637] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.31808} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.015900] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.016618] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee284537-92ae-48d1-bddf-45747f6cb9aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.042960] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.042960] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0ffef2b-c931-4f5d-858d-3581dc35d205 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.076265] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1390.076265] env[62816]: value = "task-1787944" [ 1390.076265] env[62816]: 
_type = "Task" [ 1390.076265] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.087984] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787944, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.296549] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787940, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.297080] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Releasing lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.297385] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Instance network_info: |[{"id": "de736438-152f-4337-ae73-74024c1cac15", "address": "fa:16:3e:af:7e:d2", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde736438-15", "ovs_interfaceid": "de736438-152f-4337-ae73-74024c1cac15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1390.297778] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:af:7e:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de736438-152f-4337-ae73-74024c1cac15', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1390.306899] 
env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Creating folder: Project (934fdecf54c6435999885451fc2204ed). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.307225] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5c82485-2818-431d-ab95-8339aad63dcb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.317704] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Created folder: Project (934fdecf54c6435999885451fc2204ed) in parent group-v370905. [ 1390.319020] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Creating folder: Instances. Parent ref: group-v370940. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1390.322884] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cc76299-839b-418c-a027-15b4d1fd4eb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.324966] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787941, 'name': CreateVM_Task, 'duration_secs': 0.303624} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.325212] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1390.325998] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.326170] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.326483] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1390.326729] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a6795af-9315-4c21-a8f5-b31a7d0501a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.332074] env[62816]: DEBUG 
oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1390.332074] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52312d53-9042-1ae3-734e-b748429a9af1" [ 1390.332074] env[62816]: _type = "Task" [ 1390.332074] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.333632] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Created folder: Instances in parent group-v370940. [ 1390.333744] env[62816]: DEBUG oslo.service.loopingcall [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1390.337585] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1390.337585] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c98b3a5d-da1f-4e59-a767-8111724f53ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.360801] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52312d53-9042-1ae3-734e-b748429a9af1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.361923] env[62816]: DEBUG nova.compute.manager [req-41d80590-5e4f-4837-bf51-2d8f35c3c118 req-369e8ac0-4bc8-4ce8-a0cf-f620323e0b99 service nova] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Received event network-vif-deleted-19d501a7-a6e8-4c20-95c7-6546ec0a4bba {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.364038] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1390.364038] env[62816]: value = "task-1787947" [ 1390.364038] env[62816]: _type = "Task" [ 1390.364038] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.371280] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787942, 'name': Rename_Task, 'duration_secs': 0.180473} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.371978] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1390.372179] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-881fe5e2-d226-4418-8a25-fe144cbfa9a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.377671] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787947, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.385823] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1390.385823] env[62816]: value = "task-1787948" [ 1390.385823] env[62816]: _type = "Task" [ 1390.385823] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.395339] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787943, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.400554] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.453848] env[62816]: DEBUG nova.scheduler.client.report [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1390.453848] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1390.488432] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1390.488741] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1390.489111] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1390.489359] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1390.489571] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1390.490776] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1390.490776] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1390.490776] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1390.490776] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 
tempest-ImagesTestJSON-1533817319-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1390.490776] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1390.491043] env[62816]: DEBUG nova.virt.hardware [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1390.492097] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ed5497-34fb-49f6-b636-8f8821ce8473 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.501658] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0487e1bd-b7fc-4e89-ac1f-9a517b7df339 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.592894] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787944, 'name': ReconfigVM_Task, 'duration_secs': 0.414011} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.592894] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1390.592894] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-40ab8105-18cf-4142-b9f3-aca163e1f770 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.600753] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1390.600753] env[62816]: value = "task-1787949" [ 1390.600753] env[62816]: _type = "Task" [ 1390.600753] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.616273] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787949, 'name': Rename_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.634699] env[62816]: DEBUG nova.compute.manager [req-d2760231-afc6-47df-a639-0a652878696e req-301c9ca0-6423-4b91-bed2-7f63bbd62dbd service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Received event network-vif-plugged-de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1390.636415] env[62816]: DEBUG oslo_concurrency.lockutils [req-d2760231-afc6-47df-a639-0a652878696e req-301c9ca0-6423-4b91-bed2-7f63bbd62dbd service nova] Acquiring lock "f6ddaab3-d420-4ee4-bf75-486228826635-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.636415] env[62816]: DEBUG oslo_concurrency.lockutils [req-d2760231-afc6-47df-a639-0a652878696e req-301c9ca0-6423-4b91-bed2-7f63bbd62dbd service nova] Lock "f6ddaab3-d420-4ee4-bf75-486228826635-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.636415] env[62816]: DEBUG oslo_concurrency.lockutils [req-d2760231-afc6-47df-a639-0a652878696e req-301c9ca0-6423-4b91-bed2-7f63bbd62dbd service nova] Lock "f6ddaab3-d420-4ee4-bf75-486228826635-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.636415] env[62816]: DEBUG nova.compute.manager [req-d2760231-afc6-47df-a639-0a652878696e req-301c9ca0-6423-4b91-bed2-7f63bbd62dbd service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] No waiting events found dispatching network-vif-plugged-de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1390.636415] env[62816]: WARNING nova.compute.manager [req-d2760231-afc6-47df-a639-0a652878696e req-301c9ca0-6423-4b91-bed2-7f63bbd62dbd service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Received unexpected event network-vif-plugged-de736438-152f-4337-ae73-74024c1cac15 for instance with vm_state building and task_state spawning. 
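The records just above show the compute manager handling an out-of-band `network-vif-plugged` event from Neutron: it takes the short-lived per-instance "<uuid>-events" lock, tries to pop a matching waiter via `InstanceEvents.pop_instance_event`, finds none ("No waiting events found dispatching ..."), and emits the WARNING about an unexpected event while the instance is still in `vm_state building` / `task_state spawning`. Below is a minimal, dependency-free sketch of that wait/dispatch pattern; the class and method names are illustrative rather than Nova's actual code, and where Nova guards the registry with oslo.concurrency locks (as the lockutils records show) the sketch simply uses `threading.Lock`.

```python
import threading


class InstanceEventWaiter:
    """Illustrative stand-in for the expected-event registry the log describes.

    A spawning thread registers the events it expects (e.g.
    'network-vif-plugged-<port-id>') and blocks until the external-event
    handler pops them. If an event arrives that nobody registered, the
    dispatcher reports it as unexpected, mirroring the WARNING above.
    """

    def __init__(self):
        self._lock = threading.Lock()   # Nova uses an oslo.concurrency lock here
        self._waiters = {}              # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        """Register an expected event before triggering the work that emits it."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Remove and return the waiter for an event, or None if nobody waited."""
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

    def dispatch(self, instance_uuid, event_name):
        waiter = self.pop_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to "No waiting events found dispatching ..." followed by
            # the "Received unexpected event ..." WARNING in the log.
            print(f"unexpected event {event_name} for instance {instance_uuid}")
            return False
        waiter.set()
        return True


# Usage: the event arrives before anyone registered a waiter, so it is "unexpected".
registry = InstanceEventWaiter()
registry.dispatch("f6ddaab3-d420-4ee4-bf75-486228826635",
                  "network-vif-plugged-de736438-152f-4337-ae73-74024c1cac15")
```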
[ 1390.730389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1390.730389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.800229] env[62816]: DEBUG oslo_vmware.api [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1787940, 'name': PowerOnVM_Task, 'duration_secs': 0.748048} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.800688] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1390.800788] env[62816]: INFO nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Took 15.31 seconds to spawn the instance on the hypervisor. [ 1390.800916] env[62816]: DEBUG nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1390.801768] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d8e3a9-e910-41b0-a1d2-c99cad29ae41 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.845025] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52312d53-9042-1ae3-734e-b748429a9af1, 'name': SearchDatastore_Task, 'duration_secs': 0.094796} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.845025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.845025] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.845025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.845374] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.845374] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.845374] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dc9d3ea-4cd8-4b0f-83ca-7ad2b213676b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.853943] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.853943] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.855961] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e1830b3-3787-41f5-9b50-90a120dc8337 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.862029] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1390.862029] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524a8a01-4cf4-e8e8-218c-fd7536964220" [ 1390.862029] env[62816]: _type = "Task" [ 1390.862029] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.878782] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787947, 'name': CreateVM_Task, 'duration_secs': 0.464813} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.879721] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1390.882348] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524a8a01-4cf4-e8e8-218c-fd7536964220, 'name': SearchDatastore_Task, 'duration_secs': 0.011643} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.882993] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.883167] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.883480] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1390.884338] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7b4abb2-6b04-4503-8d90-9a5d9e582872 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.890947] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-dc0bcbab-8b85-40b0-a3ef-c9208ca9bd05 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.899127] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1390.899127] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52334220-c3a5-22dc-f746-c4a4fdd3888b" [ 1390.899127] env[62816]: _type = "Task" [ 1390.899127] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.904907] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639359} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.904907] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1390.904907] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524fe46d-435b-2d59-59c6-a07fc76da573" [ 1390.904907] env[62816]: _type = "Task" [ 1390.904907] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.909193] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1390.909193] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1390.913821] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd1b4de6-c4ed-4377-b8ca-6e6115fdf90b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.918154] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787948, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.923724] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52334220-c3a5-22dc-f746-c4a4fdd3888b, 'name': SearchDatastore_Task, 'duration_secs': 0.011928} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.928901] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.929209] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.929436] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.929730] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1390.929730] env[62816]: value = "task-1787950" [ 1390.929730] env[62816]: _type = "Task" [ 1390.929730] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.933319] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524fe46d-435b-2d59-59c6-a07fc76da573, 'name': SearchDatastore_Task, 'duration_secs': 0.010989} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.933319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.933319] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1390.935524] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.935840] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.936111] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6724db68-2ea0-4622-8050-d69a2bc67416 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.938526] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75a6844a-6219-4690-a3b4-fb008385635d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.945990] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787950, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.947564] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1390.947564] env[62816]: value = "task-1787951" [ 1390.947564] env[62816]: _type = "Task" [ 1390.947564] env[62816]: } to complete. 
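
Editor's note: the Acquiring/Acquired/Releasing lock lines around the cached image vmdk come from oslo.concurrency named locks, which serialize workers that want to touch the same datastore path. A minimal sketch, assuming only the oslo_concurrency library; the lock body is a stand-in for the copy work logged above.

from oslo_concurrency import lockutils

cache_vmdk = ('[datastore1] devstack-image-cache_base/'
              '844838ed-b150-482e-a0f6-dcce37470b52/'
              '844838ed-b150-482e-a0f6-dcce37470b52.vmdk')

# In-process lock keyed by the datastore path. Passing external=True would
# also take the file-based semaphore reported as "Acquired external semaphore"
# earlier in this log.
with lockutils.lock(cache_vmdk, lock_file_prefix='nova-'):
    # Only one worker at a time checks for the cached image and copies it to
    # the instance directory while this block runs.
    pass
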
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.952253] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.952433] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.956155] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61366b11-4b5d-41f4-bd7a-00de011e4c1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.959282] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.960511] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1390.965206] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787951, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.965500] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.679s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1390.967801] env[62816]: INFO nova.compute.claims [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1390.975019] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1390.975019] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52df28fb-dab3-a9df-b184-2911c770b0e7" [ 1390.975019] env[62816]: _type = "Task" [ 1390.975019] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.982796] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52df28fb-dab3-a9df-b184-2911c770b0e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.113335] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787949, 'name': Rename_Task, 'duration_secs': 0.261756} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.113765] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.114058] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7521783-cca3-4cce-abe4-58c25885a28a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.121023] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1391.121023] env[62816]: value = "task-1787952" [ 1391.121023] env[62816]: _type = "Task" [ 1391.121023] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.129640] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787952, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.329137] env[62816]: INFO nova.compute.manager [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Took 23.35 seconds to build instance. [ 1391.405931] env[62816]: DEBUG oslo_vmware.api [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1787948, 'name': PowerOnVM_Task, 'duration_secs': 0.554506} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.406350] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1391.406947] env[62816]: INFO nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Took 13.58 seconds to spawn the instance on the hypervisor. [ 1391.406947] env[62816]: DEBUG nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1391.408064] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d626d1f-a13a-45d2-b3dd-0e101f65381b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.445712] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787950, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.203785} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.446090] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1391.447064] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb45ca74-80b6-45d6-a230-5d80518e28ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.469197] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787951, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.480526] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1391.482231] env[62816]: DEBUG nova.compute.utils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.489603] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db6b8797-28c0-4388-93e1-87a9bbe83c02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.508411] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1391.508750] env[62816]: DEBUG nova.network.neutron [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.526047] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52df28fb-dab3-a9df-b184-2911c770b0e7, 'name': SearchDatastore_Task, 'duration_secs': 0.016844} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.527596] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1391.527596] env[62816]: value = "task-1787953" [ 1391.527596] env[62816]: _type = "Task" [ 1391.527596] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.528542] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06bb6a82-14ac-47d5-a40c-228bdd7a42a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.540583] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1391.540583] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528d0558-0336-4b8b-637d-3fb9d4570d92" [ 1391.540583] env[62816]: _type = "Task" [ 1391.540583] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.546155] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787953, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.558035] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528d0558-0336-4b8b-637d-3fb9d4570d92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.632974] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787952, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.633939] env[62816]: DEBUG nova.policy [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '60305fc4159c4c26b25a7381a0055b39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '865c2861f9a745f59e7ed2bc0d2ac48b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1391.718447] env[62816]: DEBUG nova.network.neutron [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Successfully updated port: c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1391.832660] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fd3c950-e63b-4281-b78e-8e3c4dd9bb12 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.862s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.930848] env[62816]: INFO nova.compute.manager [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Took 19.61 seconds to build instance. [ 1391.966843] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787951, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.012803] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1392.047664] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787953, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.061022] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528d0558-0336-4b8b-637d-3fb9d4570d92, 'name': SearchDatastore_Task, 'duration_secs': 0.065542} completed successfully. 
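
Editor's note: the "Policy check for network:attach_external_network failed" entry is an oslo.policy decision made against the request credentials shown in that line. A rough, self-contained sketch of that kind of check; the rule strings registered here are illustrative defaults, not Nova's actual policy definitions.

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Illustrative defaults; the real rules ship with Nova in nova/policies/.
enforcer.register_defaults([
    policy.RuleDefault('context_is_admin', 'role:admin'),
    policy.RuleDefault('network:attach_external_network',
                       'rule:context_is_admin'),
])

# Credentials similar to the ones logged above: a plain member/reader user.
creds = {'roles': ['member', 'reader'],
         'project_id': '865c2861f9a745f59e7ed2bc0d2ac48b'}
target = {'project_id': creds['project_id']}

# Returns False here, which is what the "failed with credentials" line records.
print(enforcer.enforce('network:attach_external_network', target, creds))
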
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.061022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1392.061022] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f6ddaab3-d420-4ee4-bf75-486228826635/f6ddaab3-d420-4ee4-bf75-486228826635.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1392.061022] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97ba7419-b9eb-4f0c-8d4b-1915f6ba1c1b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.076224] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1392.076224] env[62816]: value = "task-1787954" [ 1392.076224] env[62816]: _type = "Task" [ 1392.076224] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.086875] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.142359] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787952, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.221541] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "refresh_cache-de33d02f-7e34-4619-a2ed-cda6c54aa030" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1392.221797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquired lock "refresh_cache-de33d02f-7e34-4619-a2ed-cda6c54aa030" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.221915] env[62816]: DEBUG nova.network.neutron [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1392.337193] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1392.434137] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a0aab70-854a-476d-80fc-0193be8f5ccf tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.128s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.469975] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787951, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.477240] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c910b7-0dc3-4e02-bf71-3e892f64e190 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.484808] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb325c74-cddf-4620-89e1-2cb613953e79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.524714] env[62816]: DEBUG nova.network.neutron [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Successfully created port: b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.527551] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142f6d94-27ef-4d98-bde5-d58d1d7fd840 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.540750] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9501ed41-2518-4869-99b9-7f88362f1a81 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.553266] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787953, 'name': ReconfigVM_Task, 'duration_secs': 0.667127} completed successfully. 
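
Editor's note: the "Successfully created port: b6ecd005-..." entry corresponds to Nova asking Neutron for a port on the shared network so instance fb84cb48-... has a VIF to plug. A hedged sketch of an equivalent call with openstacksdk; the cloud name is a placeholder, while the network and instance ids are taken from this log.

import openstack

# 'devstack' is an assumed clouds.yaml entry, not something from this log.
conn = openstack.connect(cloud='devstack')

port = conn.network.create_port(
    network_id='6cda6284-d326-430d-a483-d8ddbf5d3248',   # shared network above
    device_id='fb84cb48-d1a1-4eec-adb8-8edc585263df',    # instance uuid above
    device_owner='compute:nova')

print(port.id, port.fixed_ips)
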
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.558662] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Reconfigured VM instance instance-00000005 to attach disk [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3/ce527ce8-07b6-47a6-bab9-7934a3dda9b3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1392.559408] env[62816]: DEBUG nova.compute.provider_tree [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1392.561874] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a953408c-3403-4d8f-96bc-5234039ef74a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.569380] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1392.569380] env[62816]: value = "task-1787955" [ 1392.569380] env[62816]: _type = "Task" [ 1392.569380] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.583398] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787955, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.589027] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.633985] env[62816]: DEBUG oslo_vmware.api [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787952, 'name': PowerOnVM_Task, 'duration_secs': 1.078718} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.633985] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.633985] env[62816]: INFO nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Took 9.92 seconds to spawn the instance on the hypervisor. [ 1392.633985] env[62816]: DEBUG nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1392.634727] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9cd6a9b-f846-45a4-8a77-d3a18638cca2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.732959] env[62816]: DEBUG nova.network.neutron [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Successfully updated port: 4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1392.769525] env[62816]: DEBUG nova.network.neutron [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1392.865209] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.865853] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.878243] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.938953] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1392.970516] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787951, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.699924} completed successfully. 
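
Editor's note: the lock named after the instance uuid (0c5c5c06-...) and held by "build_and_run_instance.._locked_do_build_and_run_instance" reflects how each build is serialized per instance with a synchronized decorator. A minimal sketch of that shape, assuming only oslo_concurrency; the body is a stand-in for the claim/allocate/spawn work seen elsewhere in this log.

from oslo_concurrency import lockutils

def build_and_run_instance(instance_uuid):
    # The lock name is the instance uuid, so builds of different instances
    # proceed in parallel while work on any one instance is serialized.
    @lockutils.synchronized(instance_uuid)
    def _locked_do_build_and_run_instance():
        pass  # claim resources, allocate networking, spawn on the hypervisor

    _locked_do_build_and_run_instance()

build_and_run_instance('0c5c5c06-0b5e-4e11-84b5-ca76828a0565')
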
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.970735] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1392.970954] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1392.971257] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0446a3e0-7e47-48e9-a2ca-b78c8e90f5b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.981531] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1392.981531] env[62816]: value = "task-1787956" [ 1392.981531] env[62816]: _type = "Task" [ 1392.981531] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.998488] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787956, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.029351] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1393.060449] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1393.060874] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1393.060994] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1393.063062] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1393.063062] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1393.063151] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1393.063467] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1393.063619] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1393.063774] env[62816]: DEBUG 
nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1393.064106] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1393.064267] env[62816]: DEBUG nova.virt.hardware [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1393.074181] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4576f846-bff5-463d-8e7b-f8a542b1ba75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.085607] env[62816]: DEBUG nova.network.neutron [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Updating instance_info_cache with network_info: [{"id": "c8a5f26b-46d0-41b0-b233-6fb55f960d71", "address": "fa:16:3e:33:8e:fb", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a5f26b-46", "ovs_interfaceid": "c8a5f26b-46d0-41b0-b233-6fb55f960d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.110652] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6d2f2a-87a1-47e2-9947-e024b420325b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.114408] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787955, 'name': Rename_Task, 'duration_secs': 0.238131} completed successfully. 
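
Editor's note: the "Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1" and "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" lines describe enumerating sockets x cores x threads combinations whose product equals the vCPU count, within the flavor/image limits. A simplified, self-contained illustration of that enumeration follows; it is not the real nova.virt.hardware code.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product is vcpus."""
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    yield (s, c, t)

# The m1.nano flavor above has 1 vCPU and no explicit limits, so the only
# candidate is sockets=1, cores=1, threads=1, exactly as logged.
print(list(possible_topologies(1)))
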
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.114690] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787954, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.116227] env[62816]: ERROR nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [req-03f877b0-f0fc-4b98-a8fc-936578ec9d1a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-03f877b0-f0fc-4b98-a8fc-936578ec9d1a"}]} [ 1393.116622] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1393.120044] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7e846f6-facf-412c-8d71-04b59b77bd8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.136605] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1393.136605] env[62816]: value = "task-1787957" [ 1393.136605] env[62816]: _type = "Task" [ 1393.136605] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.137669] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1393.154924] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787957, 'name': PowerOnVM_Task} progress is 0%. 
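
Editor's note: the 409 "resource provider generation conflict" followed by "Refreshing inventories" shows Placement's optimistic concurrency: every inventory PUT carries the provider generation, and a conflict means another writer updated the provider first, so the caller re-reads it and retries. A rough sketch of that loop with plain requests; the endpoint and token are placeholders (a real client goes through keystoneauth), while the provider uuid and inventory figures mirror the log.

import requests

PLACEMENT = 'http://placement.example.test'             # assumed endpoint
HEADERS = {'x-auth-token': 'ADMIN_TOKEN',                # assumed token
           'openstack-api-version': 'placement 1.26'}
provider = '27f49c85-1bb9-4d17-a914-e2f45a5e84fa'
wanted = {'VCPU': {'total': 48, 'allocation_ratio': 4.0, 'max_unit': 16},
          'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530},
          'DISK_GB': {'total': 400, 'max_unit': 160}}

for attempt in range(3):
    # Re-read to pick up the latest generation before each write attempt.
    current = requests.get(
        f'{PLACEMENT}/resource_providers/{provider}/inventories',
        headers=HEADERS).json()
    body = {'resource_provider_generation':
                current['resource_provider_generation'],
            'inventories': wanted}
    resp = requests.put(
        f'{PLACEMENT}/resource_providers/{provider}/inventories',
        json=body, headers=HEADERS)
    if resp.status_code != 409:   # 409 is placement.concurrent_update
        break
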
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.155328] env[62816]: INFO nova.compute.manager [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Took 20.24 seconds to build instance. [ 1393.160263] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1393.160777] env[62816]: DEBUG nova.compute.provider_tree [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1393.182431] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1393.206611] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1393.237900] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "refresh_cache-455052cc-292a-414c-8c83-bc512c49a197" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1393.238063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "refresh_cache-455052cc-292a-414c-8c83-bc512c49a197" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1393.238223] env[62816]: DEBUG nova.network.neutron [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1393.365196] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "cf6ff174-1324-42bd-a77a-905b9a333c27" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.365438] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.457828] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1393.469741] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.494740] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787956, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086779} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.494740] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1393.495536] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9b49ee-ad29-4ad7-bf5c-2343b43bde8b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.520626] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1393.523887] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06cc231b-7a91-4a1d-8f75-d89c9b32d1a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.544606] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1393.544606] env[62816]: value = "task-1787958" [ 1393.544606] env[62816]: _type = "Task" [ 1393.544606] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.553238] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787958, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.591555] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Releasing lock "refresh_cache-de33d02f-7e34-4619-a2ed-cda6c54aa030" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1393.591882] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Instance network_info: |[{"id": "c8a5f26b-46d0-41b0-b233-6fb55f960d71", "address": "fa:16:3e:33:8e:fb", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a5f26b-46", "ovs_interfaceid": "c8a5f26b-46d0-41b0-b233-6fb55f960d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1393.595668] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:8e:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8a5f26b-46d0-41b0-b233-6fb55f960d71', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.604575] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Creating folder: Project (6ace0e7bd9da4b57b3a05cd6d1b86dfb). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.604905] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c47a09d-c1ff-409c-8e14-ff0d72ed4f68 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.617362] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787954, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.141088} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.618794] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f6ddaab3-d420-4ee4-bf75-486228826635/f6ddaab3-d420-4ee4-bf75-486228826635.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1393.619054] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1393.619455] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Created folder: Project (6ace0e7bd9da4b57b3a05cd6d1b86dfb) in parent group-v370905. [ 1393.619561] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Creating folder: Instances. Parent ref: group-v370943. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1393.619823] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ef10a64-e53d-491e-9842-555937774bf2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.622344] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7533fd5e-00fb-4732-b622-5fe16db9ada2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.633757] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1393.633757] env[62816]: value = "task-1787960" [ 1393.633757] env[62816]: _type = "Task" [ 1393.633757] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.646282] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Created folder: Instances in parent group-v370943. [ 1393.646282] env[62816]: DEBUG oslo.service.loopingcall [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.651093] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1393.654591] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01e8e0f5-0faf-4fb8-9bad-d6034d4fbff0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.675019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad2e0d4d-cef2-4b52-9802-4dc8b30ab69b tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "66745316-2735-4c49-b1a2-f9e547211761" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.769s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1393.675019] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787960, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.679118] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787957, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.680523] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.680523] env[62816]: value = "task-1787962" [ 1393.680523] env[62816]: _type = "Task" [ 1393.680523] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.685214] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336a6307-a151-487c-aa6e-2c3fb3c94fe6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.693419] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787962, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.697297] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa832d0e-a1e4-48a6-8068-27fd609daff3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.729493] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca560727-db7c-4262-adb3-bb0c38882e22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.738565] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49da9ce7-c0a8-4fab-9bc3-4cd10606c854 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.759552] env[62816]: DEBUG nova.compute.provider_tree [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1393.798160] env[62816]: DEBUG nova.network.neutron [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1394.052431] env[62816]: DEBUG nova.network.neutron [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Updating instance_info_cache with network_info: [{"id": "4d66ddae-3da1-44ea-a583-74e70147b7ac", "address": "fa:16:3e:ad:fb:c3", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d66ddae-3d", "ovs_interfaceid": "4d66ddae-3da1-44ea-a583-74e70147b7ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.057043] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787958, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.143144] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787960, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115978} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.143584] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1394.144485] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc38cad-a291-478e-b403-77652c73e481 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.157019] env[62816]: DEBUG oslo_vmware.api [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787957, 'name': PowerOnVM_Task, 'duration_secs': 0.71018} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.167880] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1394.167880] env[62816]: DEBUG nova.compute.manager [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1394.176018] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] f6ddaab3-d420-4ee4-bf75-486228826635/f6ddaab3-d420-4ee4-bf75-486228826635.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1394.176816] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25872c30-dfb2-4f54-85fa-e8e661c11705 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.179311] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c7ab6ed-1318-4c21-9cb4-fbf424b0cfe4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.194593] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1394.209766] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1394.209766] env[62816]: value = "task-1787963" [ 1394.209766] env[62816]: _type = "Task" [ 1394.209766] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.217457] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787962, 'name': CreateVM_Task, 'duration_secs': 0.416916} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.219104] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1394.219104] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.219493] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.219830] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1394.223516] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-266c387e-75ce-41f2-9b8d-22074706994c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.225601] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.229207] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1394.229207] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e72ac1-75c3-06e6-3e9a-e2af42f3e4f5" [ 1394.229207] env[62816]: _type = "Task" [ 1394.229207] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.238049] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e72ac1-75c3-06e6-3e9a-e2af42f3e4f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.289240] env[62816]: ERROR nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [req-23f49974-f3b9-4e70-b796-eae97c86af5d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-23f49974-f3b9-4e70-b796-eae97c86af5d"}]} [ 1394.309570] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1394.337464] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1394.337590] env[62816]: DEBUG nova.compute.provider_tree [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1394.355481] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1394.379427] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc 
tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1394.456641] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1394.456641] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1394.555976] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "refresh_cache-455052cc-292a-414c-8c83-bc512c49a197" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.555976] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Instance network_info: |[{"id": "4d66ddae-3da1-44ea-a583-74e70147b7ac", "address": "fa:16:3e:ad:fb:c3", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d66ddae-3d", "ovs_interfaceid": "4d66ddae-3da1-44ea-a583-74e70147b7ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1394.556943] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:fb:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d66ddae-3da1-44ea-a583-74e70147b7ac', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1394.565952] env[62816]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating folder: Project (2d830983a3c14168b8f0b67478f27589). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1394.569886] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-685a8d62-6490-4970-8ba4-0fc5b96db1fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.571846] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787958, 'name': ReconfigVM_Task, 'duration_secs': 0.568335} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.572191] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781/7be4c8f8-240c-4a71-93bb-aeb94243d781.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1394.575504] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9079ac8e-fdf8-4a58-b77a-814d68ae2b56 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.587086] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created folder: Project (2d830983a3c14168b8f0b67478f27589) in parent group-v370905. [ 1394.587294] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating folder: Instances. Parent ref: group-v370946. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1394.587592] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1394.587592] env[62816]: value = "task-1787965" [ 1394.587592] env[62816]: _type = "Task" [ 1394.587592] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.587769] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b99f4bf9-0c6b-48f2-81f4-aadfd882e455 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.602921] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787965, 'name': Rename_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.607225] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created folder: Instances in parent group-v370946. [ 1394.607488] env[62816]: DEBUG oslo.service.loopingcall [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1394.607684] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1394.607904] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0b3f2ba-b945-4555-b4bb-68f75964e7c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.627662] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1394.627662] env[62816]: value = "task-1787967" [ 1394.627662] env[62816]: _type = "Task" [ 1394.627662] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.648159] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.648159] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.648159] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787967, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.658462] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Received event network-changed-de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.658462] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Refreshing instance network info cache due to event network-changed-de736438-152f-4337-ae73-74024c1cac15. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1394.659096] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Acquiring lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.659096] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Acquired lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.659096] env[62816]: DEBUG nova.network.neutron [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Refreshing network info cache for port de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1394.725042] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.731592] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787963, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.731592] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.744435] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e72ac1-75c3-06e6-3e9a-e2af42f3e4f5, 'name': SearchDatastore_Task, 'duration_secs': 0.025025} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.744764] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1394.745012] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1394.745278] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1394.745693] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.746191] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1394.746365] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f99e65a3-9cad-4925-b4d5-cebfe9d5428c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.758854] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1394.758854] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1394.760036] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d1b924a-087f-4726-931a-d9c8be1058b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.765399] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1394.765399] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52688a8a-f60d-bc9a-0fa8-26b2da71f975" [ 1394.765399] env[62816]: _type = "Task" [ 1394.765399] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.776504] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52688a8a-f60d-bc9a-0fa8-26b2da71f975, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.845495] env[62816]: DEBUG nova.network.neutron [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Successfully updated port: b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.905647] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82be1bfa-e5fe-443d-95b0-1f9933d6ad1b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.915782] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd728469-916d-411d-88b7-3ac012d920ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.955423] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4d0e2e-9f0d-44a3-9d33-54c282a84619 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.964309] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2d7c22-48cc-495d-ac7b-86a372a3400a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.984133] env[62816]: DEBUG nova.compute.provider_tree [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1395.102442] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787965, 'name': Rename_Task, 'duration_secs': 0.252016} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.102712] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1395.102955] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-498fdf86-e255-4a48-b4a9-f1ed4034c0f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.111383] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1395.111383] env[62816]: value = "task-1787968" [ 1395.111383] env[62816]: _type = "Task" [ 1395.111383] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.124256] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787968, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.139199] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787967, 'name': CreateVM_Task, 'duration_secs': 0.45301} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.139199] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1395.139991] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.140348] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.140753] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1395.141109] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c5ef45c-c74a-468b-80a7-d1eea2ca3f34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.146350] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 
tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1395.146350] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b98d0c-32e1-c130-3051-ae860a4a57de" [ 1395.146350] env[62816]: _type = "Task" [ 1395.146350] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.156111] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b98d0c-32e1-c130-3051-ae860a4a57de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.233716] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787963, 'name': ReconfigVM_Task, 'duration_secs': 0.725956} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.233983] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Reconfigured VM instance instance-0000000b to attach disk [datastore1] f6ddaab3-d420-4ee4-bf75-486228826635/f6ddaab3-d420-4ee4-bf75-486228826635.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1395.234624] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5579837-d14f-486e-badb-b34d865d3ba7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.241638] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1395.241638] env[62816]: value = "task-1787969" [ 1395.241638] env[62816]: _type = "Task" [ 1395.241638] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.251172] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787969, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.275733] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52688a8a-f60d-bc9a-0fa8-26b2da71f975, 'name': SearchDatastore_Task, 'duration_secs': 0.01734} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.276537] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8224d7b7-dee2-42df-87cd-973a2ce5c034 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.282269] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1395.282269] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5277be23-fd7b-5483-cf86-cac8b69e1de7" [ 1395.282269] env[62816]: _type = "Task" [ 1395.282269] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.290964] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5277be23-fd7b-5483-cf86-cac8b69e1de7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.350139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.350304] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquired lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.350504] env[62816]: DEBUG nova.network.neutron [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.454622] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1395.491061] env[62816]: DEBUG nova.scheduler.client.report [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1395.593209] 
env[62816]: DEBUG nova.network.neutron [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updated VIF entry in instance network info cache for port de736438-152f-4337-ae73-74024c1cac15. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1395.593641] env[62816]: DEBUG nova.network.neutron [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updating instance_info_cache with network_info: [{"id": "de736438-152f-4337-ae73-74024c1cac15", "address": "fa:16:3e:af:7e:d2", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde736438-15", "ovs_interfaceid": "de736438-152f-4337-ae73-74024c1cac15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.623439] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787968, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.657844] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b98d0c-32e1-c130-3051-ae860a4a57de, 'name': SearchDatastore_Task, 'duration_secs': 0.010646} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.658032] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.658279] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1395.658489] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.683329] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "0e0261fe-4376-487c-9d54-c4f37577409c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.683818] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0e0261fe-4376-487c-9d54-c4f37577409c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1395.753242] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787969, 'name': Rename_Task, 'duration_secs': 0.273256} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.753601] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1395.753903] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4af58f43-b6be-4454-b22d-29fe92088d76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.762055] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1395.762055] env[62816]: value = "task-1787970" [ 1395.762055] env[62816]: _type = "Task" [ 1395.762055] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.770099] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.793148] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5277be23-fd7b-5483-cf86-cac8b69e1de7, 'name': SearchDatastore_Task, 'duration_secs': 0.009041} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.793594] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1395.793980] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] de33d02f-7e34-4619-a2ed-cda6c54aa030/de33d02f-7e34-4619-a2ed-cda6c54aa030.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1395.794348] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.794684] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1395.794947] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a08154d-98c6-4a5f-a98d-cfac86f6999f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.799941] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e630f1e3-1af4-494f-a6b2-299952fd67ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.806390] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1395.806390] env[62816]: value = "task-1787971" [ 1395.806390] env[62816]: _type = "Task" [ 1395.806390] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.811767] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1395.811965] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1395.819103] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10b9b4eb-bca6-4e9d-ab02-85f67ca4f3b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.826021] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787971, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.828954] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1395.828954] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529f0106-64d3-6a64-fa39-695cc56267f1" [ 1395.828954] env[62816]: _type = "Task" [ 1395.828954] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.837901] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529f0106-64d3-6a64-fa39-695cc56267f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.928058] env[62816]: DEBUG nova.network.neutron [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.001029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.035s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.002907] env[62816]: DEBUG nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1396.006416] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.591s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.008789] env[62816]: INFO nova.compute.claims [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1396.096812] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Releasing lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.097231] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Received event network-vif-plugged-c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.097443] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Acquiring lock "de33d02f-7e34-4619-a2ed-cda6c54aa030-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.097661] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.097866] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.098039] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] No waiting events found dispatching network-vif-plugged-c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1396.098213] env[62816]: WARNING nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Received unexpected event network-vif-plugged-c8a5f26b-46d0-41b0-b233-6fb55f960d71 for instance with vm_state building and task_state spawning. 
[ 1396.098379] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Received event network-changed-c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.098532] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Refreshing instance network info cache due to event network-changed-c8a5f26b-46d0-41b0-b233-6fb55f960d71. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1396.098715] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Acquiring lock "refresh_cache-de33d02f-7e34-4619-a2ed-cda6c54aa030" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.098849] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Acquired lock "refresh_cache-de33d02f-7e34-4619-a2ed-cda6c54aa030" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.099025] env[62816]: DEBUG nova.network.neutron [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Refreshing network info cache for port c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.131187] env[62816]: DEBUG oslo_vmware.api [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787968, 'name': PowerOnVM_Task, 'duration_secs': 0.828622} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.131187] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1396.131187] env[62816]: DEBUG nova.compute.manager [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1396.131774] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214fc315-47ea-4061-aaea-923596a80f24 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.153539] env[62816]: DEBUG nova.compute.manager [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Received event network-changed-7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.153539] env[62816]: DEBUG nova.compute.manager [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Refreshing instance network info cache due to event network-changed-7b35c8f0-5f21-4920-93b4-f88823b815ab. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1396.153539] env[62816]: DEBUG oslo_concurrency.lockutils [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] Acquiring lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.153539] env[62816]: DEBUG oslo_concurrency.lockutils [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] Acquired lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.153539] env[62816]: DEBUG nova.network.neutron [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Refreshing network info cache for port 7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.183144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.183423] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557" acquired 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.278956] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.317225] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787971, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.339045] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529f0106-64d3-6a64-fa39-695cc56267f1, 'name': SearchDatastore_Task, 'duration_secs': 0.037352} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.340557] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9702c262-57a3-4c55-bcfe-2ea57cf26a3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.347513] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1396.347513] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52be2b21-e834-7d62-51c1-23b15db890b4" [ 1396.347513] env[62816]: _type = "Task" [ 1396.347513] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.361253] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52be2b21-e834-7d62-51c1-23b15db890b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.517022] env[62816]: DEBUG nova.compute.utils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1396.519063] env[62816]: DEBUG nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Not allocating networking since 'none' was specified. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1396.579295] env[62816]: DEBUG nova.network.neutron [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updating instance_info_cache with network_info: [{"id": "b6ecd005-0fec-4275-91b7-0814f3514b40", "address": "fa:16:3e:ae:6d:f6", "network": {"id": "bf0dc49e-afa3-4f8d-a85b-51f2f60b1f0c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-155605247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865c2861f9a745f59e7ed2bc0d2ac48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ecd005-0f", "ovs_interfaceid": "b6ecd005-0fec-4275-91b7-0814f3514b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.654870] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.737425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.737425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.737425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.737425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 
tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1396.737605] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.738250] env[62816]: INFO nova.compute.manager [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Terminating instance [ 1396.740030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "refresh_cache-ce527ce8-07b6-47a6-bab9-7934a3dda9b3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.740195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquired lock "refresh_cache-ce527ce8-07b6-47a6-bab9-7934a3dda9b3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.740364] env[62816]: DEBUG nova.network.neutron [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1396.776204] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.821392] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787971, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644062} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.821710] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] de33d02f-7e34-4619-a2ed-cda6c54aa030/de33d02f-7e34-4619-a2ed-cda6c54aa030.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1396.821839] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1396.822122] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-dfb54a2e-94d8-42a6-956c-e310d44f33ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.829762] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1396.829762] env[62816]: value = "task-1787972" [ 1396.829762] env[62816]: _type = "Task" [ 1396.829762] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.843856] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787972, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.860893] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52be2b21-e834-7d62-51c1-23b15db890b4, 'name': SearchDatastore_Task, 'duration_secs': 0.074927} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.860893] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.860893] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 455052cc-292a-414c-8c83-bc512c49a197/455052cc-292a-414c-8c83-bc512c49a197.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1396.860893] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fd3b131-f05f-4e9b-b11e-b5ed84326be5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.871509] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1396.871509] env[62816]: value = "task-1787973" [ 1396.871509] env[62816]: _type = "Task" [ 1396.871509] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.883754] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787973, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.009429] env[62816]: DEBUG nova.network.neutron [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Updated VIF entry in instance network info cache for port c8a5f26b-46d0-41b0-b233-6fb55f960d71. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.010374] env[62816]: DEBUG nova.network.neutron [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Updating instance_info_cache with network_info: [{"id": "c8a5f26b-46d0-41b0-b233-6fb55f960d71", "address": "fa:16:3e:33:8e:fb", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8a5f26b-46", "ovs_interfaceid": "c8a5f26b-46d0-41b0-b233-6fb55f960d71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.021761] env[62816]: DEBUG nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1397.086130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Releasing lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.086944] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Instance network_info: |[{"id": "b6ecd005-0fec-4275-91b7-0814f3514b40", "address": "fa:16:3e:ae:6d:f6", "network": {"id": "bf0dc49e-afa3-4f8d-a85b-51f2f60b1f0c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-155605247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865c2861f9a745f59e7ed2bc0d2ac48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ecd005-0f", "ovs_interfaceid": "b6ecd005-0fec-4275-91b7-0814f3514b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1397.090144] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ae:6d:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd88b750a-0e7d-4f16-8bd5-8e6d5743b720', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6ecd005-0fec-4275-91b7-0814f3514b40', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1397.099029] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Creating folder: Project (865c2861f9a745f59e7ed2bc0d2ac48b). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.100685] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5edf7e10-0e87-4aca-a664-217d735852b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.116898] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Created folder: Project (865c2861f9a745f59e7ed2bc0d2ac48b) in parent group-v370905. [ 1397.117204] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Creating folder: Instances. Parent ref: group-v370949. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1397.117533] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b76a7da-d319-4e2e-91c6-c4f1bfdc909b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.132341] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Created folder: Instances in parent group-v370949. [ 1397.133339] env[62816]: DEBUG oslo.service.loopingcall [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1397.133339] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1397.133339] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bb55fc5-8b4b-447c-8e40-0576d2d87c3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.162479] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1397.162479] env[62816]: value = "task-1787976" [ 1397.162479] env[62816]: _type = "Task" [ 1397.162479] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.180470] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787976, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.270500] env[62816]: DEBUG nova.network.neutron [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1397.277658] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.337645] env[62816]: DEBUG nova.network.neutron [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.348014] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.113599} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.348014] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1397.348014] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2f413b-bba8-4c49-83c9-6877e0e1d073 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.377901] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] de33d02f-7e34-4619-a2ed-cda6c54aa030/de33d02f-7e34-4619-a2ed-cda6c54aa030.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.381509] env[62816]: DEBUG nova.network.neutron [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updated VIF entry in instance network info cache for port 7b35c8f0-5f21-4920-93b4-f88823b815ab. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.381913] env[62816]: DEBUG nova.network.neutron [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updating instance_info_cache with network_info: [{"id": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "address": "fa:16:3e:c2:e3:73", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b35c8f0-5f", "ovs_interfaceid": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.383465] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05188f58-e425-4972-b45a-11140925d89d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.418226] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787973, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.420015] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1397.420015] env[62816]: value = "task-1787977" [ 1397.420015] env[62816]: _type = "Task" [ 1397.420015] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.432295] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787977, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.453033] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.517332] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Releasing lock "refresh_cache-de33d02f-7e34-4619-a2ed-cda6c54aa030" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.517332] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Received event network-vif-plugged-4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.517332] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Acquiring lock "455052cc-292a-414c-8c83-bc512c49a197-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.517332] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Lock "455052cc-292a-414c-8c83-bc512c49a197-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.517575] env[62816]: DEBUG oslo_concurrency.lockutils [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] Lock "455052cc-292a-414c-8c83-bc512c49a197-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.517610] env[62816]: DEBUG nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] No waiting events found dispatching network-vif-plugged-4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1397.517754] env[62816]: WARNING nova.compute.manager [req-bbbb0d44-1cd2-4e71-8938-b58f48917cbd req-f0ec5193-bfef-45c5-82fd-b23df90f76f4 service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Received unexpected event network-vif-plugged-4d66ddae-3da1-44ea-a583-74e70147b7ac for instance with vm_state building and task_state spawning. 
[ 1397.574620] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.574880] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.599889] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb565812-ae53-49c7-916b-b8c36bee0bd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.613504] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2cc79b-c4dd-4dfe-b119-1f89fa20d347 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.652319] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcae927a-c65f-4a22-9da6-17a8dd6044da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.660418] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92441e55-15f3-473d-b24f-7e4de8a99dc8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.676980] env[62816]: DEBUG nova.compute.provider_tree [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1397.681420] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787976, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.773602] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.842130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Releasing lock "refresh_cache-ce527ce8-07b6-47a6-bab9-7934a3dda9b3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.842588] env[62816]: DEBUG nova.compute.manager [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1397.842808] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1397.843768] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd04c1f3-d4b9-414b-ab95-6fdeb1a0fc93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.850957] env[62816]: DEBUG nova.compute.manager [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Received event network-changed-4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.851159] env[62816]: DEBUG nova.compute.manager [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Refreshing instance network info cache due to event network-changed-4d66ddae-3da1-44ea-a583-74e70147b7ac. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1397.851356] env[62816]: DEBUG oslo_concurrency.lockutils [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] Acquiring lock "refresh_cache-455052cc-292a-414c-8c83-bc512c49a197" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.851519] env[62816]: DEBUG oslo_concurrency.lockutils [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] Acquired lock "refresh_cache-455052cc-292a-414c-8c83-bc512c49a197" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.851681] env[62816]: DEBUG nova.network.neutron [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Refreshing network info cache for port 4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.855196] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1397.855656] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7aa15e2-499a-4778-b202-2a66ddcc7555 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.862788] env[62816]: DEBUG oslo_vmware.api [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1397.862788] env[62816]: value = "task-1787978" [ 1397.862788] env[62816]: _type = "Task" [ 1397.862788] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.871523] env[62816]: DEBUG oslo_vmware.api [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787978, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.893120] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787973, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.71958} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.893390] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 455052cc-292a-414c-8c83-bc512c49a197/455052cc-292a-414c-8c83-bc512c49a197.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1397.893609] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1397.893847] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16535399-f349-4b4d-a460-557217e9470d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.899607] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1397.899607] env[62816]: value = "task-1787979" [ 1397.899607] env[62816]: _type = "Task" [ 1397.899607] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.904756] env[62816]: DEBUG oslo_concurrency.lockutils [req-1c23fb98-0cc9-4e47-9c28-f937b253715c req-f89d331f-00ed-491d-adda-a34ca47c8e34 service nova] Releasing lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.908054] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787979, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.914949] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "0a1a8539-940a-4a17-9826-82736be41892" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.915183] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0a1a8539-940a-4a17-9826-82736be41892" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.929845] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787977, 'name': ReconfigVM_Task, 'duration_secs': 0.477714} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.930053] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Reconfigured VM instance instance-0000000c to attach disk [datastore1] de33d02f-7e34-4619-a2ed-cda6c54aa030/de33d02f-7e34-4619-a2ed-cda6c54aa030.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1397.930698] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-137dcefe-cb7f-47c3-b96c-736ff7706825 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.937356] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1397.937356] env[62816]: value = "task-1787980" [ 1397.937356] env[62816]: _type = "Task" [ 1397.937356] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.947347] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787980, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.960687] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1397.960981] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1398.036139] env[62816]: DEBUG nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1398.067117] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1398.067414] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1398.067538] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1398.067738] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1398.067854] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1398.068238] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc 
tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1398.068238] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1398.068730] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1398.068730] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1398.068730] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1398.069062] env[62816]: DEBUG nova.virt.hardware [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1398.070014] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45865fdb-8a89-4698-8a26-6cba111bb18a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.077686] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7514bc-c3ee-47c0-b8eb-125faa75ab1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.094047] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1398.100143] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Creating folder: Project (c1aff70f7d554f9f88f0de5c2b43783f). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1398.100548] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20f0b520-b0dd-4bc7-8021-7fd9e1c5d191 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.110489] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Created folder: Project (c1aff70f7d554f9f88f0de5c2b43783f) in parent group-v370905. [ 1398.110695] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Creating folder: Instances. Parent ref: group-v370952. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1398.110934] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-21f49aa2-391b-4a8d-ac0e-81aed2b659c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.112808] env[62816]: INFO nova.compute.manager [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Rebuilding instance [ 1398.121389] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Created folder: Instances in parent group-v370952. [ 1398.125025] env[62816]: DEBUG oslo.service.loopingcall [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1398.125025] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1398.125025] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e06b03dd-fa7a-4442-905c-6db8dbeaefe0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.145174] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1398.145174] env[62816]: value = "task-1787983" [ 1398.145174] env[62816]: _type = "Task" [ 1398.145174] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.153280] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787983, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.162254] env[62816]: DEBUG nova.compute.manager [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1398.162901] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd927b9-dfe7-46a8-a135-85c6ef728b6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.178207] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787976, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.183390] env[62816]: DEBUG nova.scheduler.client.report [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1398.273854] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.374247] env[62816]: DEBUG oslo_vmware.api [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787978, 'name': PowerOffVM_Task, 'duration_secs': 0.228158} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.374952] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1398.374952] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1398.375217] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e068292-d5b1-489d-958b-533547b6987d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.405968] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1398.406220] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1398.406503] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Deleting the datastore file [datastore1] ce527ce8-07b6-47a6-bab9-7934a3dda9b3 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1398.407340] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bef0c304-a0a0-4ab1-a9da-bedd3a30a09f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.412607] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120276} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.413252] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1398.414059] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1141adb8-b389-466f-be43-03b7f064f08f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.418231] env[62816]: DEBUG oslo_vmware.api [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for the task: (returnval){ [ 1398.418231] env[62816]: value = "task-1787985" [ 1398.418231] env[62816]: _type = "Task" [ 1398.418231] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.443161] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 455052cc-292a-414c-8c83-bc512c49a197/455052cc-292a-414c-8c83-bc512c49a197.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1398.444594] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8beb2bb4-2cd0-4504-a5b3-3018219aaab0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.471892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.472243] env[62816]: DEBUG oslo_vmware.api [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.478846] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787980, 'name': Rename_Task, 'duration_secs': 0.153094} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.480698] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1398.481261] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1398.481261] env[62816]: value = "task-1787986" [ 1398.481261] env[62816]: _type = "Task" [ 1398.481261] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.481261] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0af06770-2bd8-4edd-b7cf-fce900d9298e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.494092] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787986, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.495820] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1398.495820] env[62816]: value = "task-1787987" [ 1398.495820] env[62816]: _type = "Task" [ 1398.495820] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.507974] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787987, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.581677] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.583330] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.351326] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1399.352109] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.346s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.352603] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1399.357363] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787983, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.357502] env[62816]: WARNING oslo_vmware.common.loopingcall [-] task run outlasted interval by 0.210916 sec [ 1399.363258] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46dc6668-f807-40d4-a0c1-25492562f83e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.366084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.726s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.368016] env[62816]: DEBUG nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Received event network-vif-plugged-b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.368210] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Acquiring lock "fb84cb48-d1a1-4eec-adb8-8edc585263df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.368414] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.368573] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.368734] env[62816]: DEBUG nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] No waiting events found dispatching network-vif-plugged-b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1399.368886] env[62816]: WARNING nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Received unexpected event network-vif-plugged-b6ecd005-0fec-4275-91b7-0814f3514b40 for instance with vm_state building and task_state spawning. 
[ 1399.369048] env[62816]: DEBUG nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Received event network-changed-b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.369200] env[62816]: DEBUG nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Refreshing instance network info cache due to event network-changed-b6ecd005-0fec-4275-91b7-0814f3514b40. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1399.369400] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Acquiring lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.369575] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Acquired lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.370756] env[62816]: DEBUG nova.network.neutron [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Refreshing network info cache for port b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1399.386315] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "e1067d45-1938-4021-b902-21a1aa57058a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.386651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "e1067d45-1938-4021-b902-21a1aa57058a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.405466] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787976, 'name': CreateVM_Task, 'duration_secs': 1.611945} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.405749] env[62816]: DEBUG oslo_vmware.api [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1787970, 'name': PowerOnVM_Task, 'duration_secs': 2.991889} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.413956] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.414278] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1399.414523] env[62816]: INFO nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Took 14.12 seconds to spawn the instance on the hypervisor. [ 1399.414708] env[62816]: DEBUG nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1399.415362] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1399.415362] env[62816]: value = "task-1787988" [ 1399.415362] env[62816]: _type = "Task" [ 1399.415362] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.415572] env[62816]: DEBUG oslo_vmware.api [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Task: {'id': task-1787985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097473} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.416042] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787986, 'name': ReconfigVM_Task, 'duration_secs': 0.663436} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.419743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.419908] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.420244] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.421017] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb2dbef-73a3-4f7f-809f-1721f13c93b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.423754] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1399.423936] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1399.424126] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1399.424293] env[62816]: INFO nova.compute.manager [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Took 1.58 seconds to destroy the instance on the hypervisor. [ 1399.424518] env[62816]: DEBUG oslo.service.loopingcall [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.424822] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 455052cc-292a-414c-8c83-bc512c49a197/455052cc-292a-414c-8c83-bc512c49a197.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1399.425636] env[62816]: DEBUG oslo_vmware.api [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1787987, 'name': PowerOnVM_Task, 'duration_secs': 0.542095} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.428753] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41ed054c-e213-4964-9d1c-c6d4f02336c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.434150] env[62816]: DEBUG nova.compute.manager [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1399.434304] env[62816]: DEBUG nova.network.neutron [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1399.435853] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-12dd82ef-fd00-4b93-9651-fc940a720b0b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.439286] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1399.439286] env[62816]: INFO nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Took 11.67 seconds to spawn the instance on the hypervisor. [ 1399.439286] env[62816]: DEBUG nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1399.439286] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787983, 'name': CreateVM_Task, 'duration_secs': 0.921296} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.439819] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878e5085-66dd-4f46-9950-86c055b03f2b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.442152] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.445177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.452676] env[62816]: DEBUG nova.network.neutron [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Updated VIF entry in instance network info cache for port 4d66ddae-3da1-44ea-a583-74e70147b7ac. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1399.453031] env[62816]: DEBUG nova.network.neutron [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Updating instance_info_cache with network_info: [{"id": "4d66ddae-3da1-44ea-a583-74e70147b7ac", "address": "fa:16:3e:ad:fb:c3", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d66ddae-3d", "ovs_interfaceid": "4d66ddae-3da1-44ea-a583-74e70147b7ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.455718] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787988, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.456358] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1399.456358] env[62816]: value = "task-1787989" [ 1399.456358] env[62816]: _type = "Task" [ 1399.456358] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.456358] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1399.456358] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5264a54d-36c1-26a4-3a50-73dde3eed387" [ 1399.456358] env[62816]: _type = "Task" [ 1399.456358] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.468821] env[62816]: DEBUG nova.network.neutron [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1399.477433] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5264a54d-36c1-26a4-3a50-73dde3eed387, 'name': SearchDatastore_Task, 'duration_secs': 0.023579} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.480772] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.480955] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1399.481204] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.481352] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.481616] env[62816]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1399.482170] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787989, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.482904] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.482904] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.483072] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e25df4f-34c0-4774-be0e-7b96ba97fe21 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.484839] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dffab289-ba95-41f5-9a06-ca08e97132ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.490893] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1399.490893] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52eb86ed-e8c4-0012-8276-d1aa3a12b3ed" [ 1399.490893] env[62816]: _type = "Task" [ 1399.490893] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.496766] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1399.496959] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1399.498022] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c7bede2-5510-4c03-8734-81302051fe6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.503824] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52eb86ed-e8c4-0012-8276-d1aa3a12b3ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.507190] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1399.507190] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528368b4-77e4-38c7-88ed-2dbb89c24d22" [ 1399.507190] env[62816]: _type = "Task" [ 1399.507190] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.514910] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528368b4-77e4-38c7-88ed-2dbb89c24d22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.587774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.587774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.861755] env[62816]: DEBUG nova.compute.utils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1399.863256] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1399.863436] env[62816]: DEBUG nova.network.neutron [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1399.895315] env[62816]: INFO nova.compute.claims [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1399.922790] env[62816]: DEBUG nova.policy [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0af00d6302f455988732c7568cf84c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '005f772e517340a0acaac0d61b8262df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1399.949254] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787988, 'name': PowerOffVM_Task, 'duration_secs': 0.182249} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.950347] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1399.950347] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1399.951626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15dc966-183a-4251-b730-484e59b085e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.970476] env[62816]: DEBUG oslo_concurrency.lockutils [req-905306e2-89a9-4948-a339-936aafaadb37 req-16337ed6-497f-4d51-b680-1bb6b6cf076b service nova] Releasing lock "refresh_cache-455052cc-292a-414c-8c83-bc512c49a197" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.971761] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1399.975131] env[62816]: DEBUG nova.network.neutron [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.987181] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ceff10f-2585-429d-932c-eeed57faa7ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.989960] env[62816]: INFO nova.compute.manager [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Took 20.89 seconds to build instance. [ 1399.993192] env[62816]: INFO nova.compute.manager [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Took 20.56 seconds to build instance. [ 1400.003351] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787989, 'name': Rename_Task, 'duration_secs': 0.237242} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.005340] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.005340] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c00bb47e-44fc-4fc7-9950-1fab86c97a53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.010376] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52eb86ed-e8c4-0012-8276-d1aa3a12b3ed, 'name': SearchDatastore_Task, 'duration_secs': 0.024852} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.015354] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.015692] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.015998] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.018482] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1400.018482] env[62816]: value = "task-1787991" [ 1400.018482] env[62816]: _type = "Task" [ 1400.018482] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.030024] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528368b4-77e4-38c7-88ed-2dbb89c24d22, 'name': SearchDatastore_Task, 'duration_secs': 0.013799} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.034932] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1400.034932] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1400.035116] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Deleting the datastore file [datastore1] 66745316-2735-4c49-b1a2-f9e547211761 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1400.040038] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2fa37e9-aadf-4585-904c-c784d29f0359 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.043151] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-351a1e7d-6873-48c7-8b2c-6ac0c4b65e15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.047153] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787991, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.052230] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1400.052230] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524429d6-6989-eadb-a719-36d3e0c09fbe" [ 1400.052230] env[62816]: _type = "Task" [ 1400.052230] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.053429] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1400.053429] env[62816]: value = "task-1787992" [ 1400.053429] env[62816]: _type = "Task" [ 1400.053429] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.076592] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524429d6-6989-eadb-a719-36d3e0c09fbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.077816] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787992, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.340329] env[62816]: DEBUG nova.network.neutron [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updated VIF entry in instance network info cache for port b6ecd005-0fec-4275-91b7-0814f3514b40. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1400.340616] env[62816]: DEBUG nova.network.neutron [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updating instance_info_cache with network_info: [{"id": "b6ecd005-0fec-4275-91b7-0814f3514b40", "address": "fa:16:3e:ae:6d:f6", "network": {"id": "bf0dc49e-afa3-4f8d-a85b-51f2f60b1f0c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-155605247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865c2861f9a745f59e7ed2bc0d2ac48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ecd005-0f", "ovs_interfaceid": "b6ecd005-0fec-4275-91b7-0814f3514b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1400.369393] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1400.399149] env[62816]: INFO nova.compute.resource_tracker [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating resource usage from migration de83f004-df93-40fc-a350-b89d170de652 [ 1400.425030] env[62816]: DEBUG nova.network.neutron [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Successfully created port: f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1400.431327] env[62816]: DEBUG nova.compute.manager [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Received event network-changed-7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.431327] env[62816]: DEBUG nova.compute.manager [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Refreshing instance network info cache due to event network-changed-7b35c8f0-5f21-4920-93b4-f88823b815ab. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.431327] env[62816]: DEBUG oslo_concurrency.lockutils [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] Acquiring lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.431327] env[62816]: DEBUG oslo_concurrency.lockutils [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] Acquired lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.431327] env[62816]: DEBUG nova.network.neutron [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Refreshing network info cache for port 7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.456675] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.456923] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.490359] env[62816]: INFO 
nova.compute.manager [-] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Took 1.06 seconds to deallocate network for instance. [ 1400.495697] env[62816]: DEBUG oslo_concurrency.lockutils [None req-45ce4295-0fe7-4a5c-960a-8aa1e21f3fd5 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "f6ddaab3-d420-4ee4-bf75-486228826635" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.019s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.501453] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e13759e-45f1-4735-9612-171874dfad77 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.254s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.532933] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "7be4c8f8-240c-4a71-93bb-aeb94243d781" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.533844] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "7be4c8f8-240c-4a71-93bb-aeb94243d781" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.533984] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "7be4c8f8-240c-4a71-93bb-aeb94243d781-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.534178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "7be4c8f8-240c-4a71-93bb-aeb94243d781-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.534348] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "7be4c8f8-240c-4a71-93bb-aeb94243d781-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.536152] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] 
Task: {'id': task-1787991, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.541024] env[62816]: INFO nova.compute.manager [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Terminating instance [ 1400.544090] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "refresh_cache-7be4c8f8-240c-4a71-93bb-aeb94243d781" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.544090] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "refresh_cache-7be4c8f8-240c-4a71-93bb-aeb94243d781" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.544090] env[62816]: DEBUG nova.network.neutron [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1400.572481] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524429d6-6989-eadb-a719-36d3e0c09fbe, 'name': SearchDatastore_Task, 'duration_secs': 0.015891} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.575938] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.576258] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] fb84cb48-d1a1-4eec-adb8-8edc585263df/fb84cb48-d1a1-4eec-adb8-8edc585263df.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1400.576668] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1787992, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186986} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.579559] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.579777] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.580018] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d3292fa8-3758-4cfd-8b29-2455b0612c3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.582025] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1400.582161] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1400.582339] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1400.585461] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b3fdbe7-e4f4-478f-9fbe-1ec1b564659c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.592658] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1400.592658] env[62816]: value = "task-1787993" [ 1400.592658] env[62816]: _type = "Task" [ 1400.592658] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.596828] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.597106] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.598044] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f63fba4-e62c-45f4-a39c-6b8dd24a486f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.603453] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787993, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.609833] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1400.609833] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520fba73-a768-4cb0-956b-854a3a75ef85" [ 1400.609833] env[62816]: _type = "Task" [ 1400.609833] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.618476] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520fba73-a768-4cb0-956b-854a3a75ef85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.843956] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Releasing lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.844018] env[62816]: DEBUG nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Received event network-changed-625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.844686] env[62816]: DEBUG nova.compute.manager [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Refreshing instance network info cache due to event network-changed-625f74d1-1d6d-4ca8-90f2-5b8327963031. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.844686] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Acquiring lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.844859] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Acquired lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.845380] env[62816]: DEBUG nova.network.neutron [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Refreshing network info cache for port 625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1401.001455] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88c6735-6811-4ee6-ba2f-b1f2b4f5c619 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.007913] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1401.012068] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.012457] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1401.029142] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06786333-9a84-46fa-b761-b56dca926f2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.037369] env[62816]: DEBUG oslo_vmware.api [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1787991, 'name': PowerOnVM_Task, 'duration_secs': 0.710965} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.064252] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.064252] env[62816]: INFO nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Took 10.61 seconds to spawn the instance on the hypervisor. [ 1401.064478] env[62816]: DEBUG nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.069623] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a436223-a3a5-4cc8-9c95-f1149a80ca52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.072670] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43e5af6-6a7e-4a27-9c8a-b26b1bc1e0db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.086377] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ed2316-fb33-4a3c-a476-6af5a6f40403 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.108745] env[62816]: DEBUG nova.compute.provider_tree [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.118880] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787993, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.126513] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520fba73-a768-4cb0-956b-854a3a75ef85, 'name': SearchDatastore_Task, 'duration_secs': 0.033432} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.127660] env[62816]: DEBUG nova.network.neutron [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1401.132219] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e8da1c2-33f7-4e28-8542-f68b2ebf4677 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.139862] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1401.139862] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520f5466-6cef-23eb-e409-885da63a3095" [ 1401.139862] env[62816]: _type = "Task" [ 1401.139862] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.149449] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520f5466-6cef-23eb-e409-885da63a3095, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.283608] env[62816]: DEBUG nova.network.neutron [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.389160] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1401.428192] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1401.428551] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1401.428754] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1401.428966] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1401.429615] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1401.429863] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1401.430042] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1401.430275] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1401.430361] env[62816]: DEBUG 
nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1401.430547] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1401.430723] env[62816]: DEBUG nova.virt.hardware [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1401.432961] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c451acdb-37dd-4484-909e-efaa4d309801 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.436884] env[62816]: DEBUG nova.network.neutron [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updated VIF entry in instance network info cache for port 7b35c8f0-5f21-4920-93b4-f88823b815ab. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.438419] env[62816]: DEBUG nova.network.neutron [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updating instance_info_cache with network_info: [{"id": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "address": "fa:16:3e:c2:e3:73", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b35c8f0-5f", "ovs_interfaceid": "7b35c8f0-5f21-4920-93b4-f88823b815ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.444508] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed64594e-7715-4a31-af42-342420331b08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.544327] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.552201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.600206] env[62816]: INFO nova.compute.manager [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Took 20.00 seconds to build instance. [ 1401.616326] env[62816]: DEBUG nova.scheduler.client.report [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1401.619779] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787993, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.872372} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.623731] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] fb84cb48-d1a1-4eec-adb8-8edc585263df/fb84cb48-d1a1-4eec-adb8-8edc585263df.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1401.623894] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1401.625302] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d139e49b-b5e4-4eeb-8fc4-a19d2c29ee07 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.633894] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1401.634198] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1401.634408] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1401.634631] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1401.634886] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1401.635145] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1401.635495] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1401.635735] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1401.635998] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1401.636250] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1401.636519] env[62816]: DEBUG nova.virt.hardware [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1401.638455] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148bf5be-5bea-4504-bc74-0415769cf9d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.646715] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1401.646715] env[62816]: value = "task-1787994" [ 1401.646715] env[62816]: _type = "Task" [ 1401.646715] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.658511] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c35801-1d19-46c6-a5df-57ea57711fea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.669084] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520f5466-6cef-23eb-e409-885da63a3095, 'name': SearchDatastore_Task, 'duration_secs': 0.074804} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.669371] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787994, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.670144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.670415] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 2bc7f973-007d-44bd-aae8-d3b62506efba/2bc7f973-007d-44bd-aae8-d3b62506efba.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.670685] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-625ec9f0-05a8-4d5f-af26-a73e9b1348b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.684146] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1401.687472] env[62816]: DEBUG oslo.service.loopingcall [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.688736] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1401.688817] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f9bd965-c403-4e2f-b794-ba870cdc54a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.708859] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1401.708859] env[62816]: value = "task-1787995" [ 1401.708859] env[62816]: _type = "Task" [ 1401.708859] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.713827] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.713827] env[62816]: value = "task-1787996" [ 1401.713827] env[62816]: _type = "Task" [ 1401.713827] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.720426] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1787995, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.725330] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787996, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.787082] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "refresh_cache-7be4c8f8-240c-4a71-93bb-aeb94243d781" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.787560] env[62816]: DEBUG nova.compute.manager [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1401.787768] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1401.788783] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2710d39-de59-4fc2-9a7b-729de868cb17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.797811] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1401.798247] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2510ec7a-1e34-4c06-9e59-1e66161f54de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.805606] env[62816]: DEBUG oslo_vmware.api [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1401.805606] env[62816]: value = "task-1787997" [ 1401.805606] env[62816]: _type = "Task" [ 1401.805606] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.814264] env[62816]: DEBUG oslo_vmware.api [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787997, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.916400] env[62816]: DEBUG nova.network.neutron [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updated VIF entry in instance network info cache for port 625f74d1-1d6d-4ca8-90f2-5b8327963031. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.920542] env[62816]: DEBUG nova.network.neutron [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updating instance_info_cache with network_info: [{"id": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "address": "fa:16:3e:32:f6:90", "network": {"id": "6b18a31b-5da2-4068-8d98-3f6d1b0178c3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1012623858-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55ac373cda544fdda9b58434d070d395", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap625f74d1-1d", "ovs_interfaceid": "625f74d1-1d6d-4ca8-90f2-5b8327963031", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.941339] env[62816]: DEBUG oslo_concurrency.lockutils [req-2843e3e0-b8c2-4593-9822-ca62a17c85c8 req-228302c2-aa3b-4a50-b2c4-c05e3d399d8c service nova] Releasing lock "refresh_cache-11a4d835-c149-49f0-8e4f-b3f9a7f1afca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.107276] env[62816]: DEBUG oslo_concurrency.lockutils [None req-249b27c3-61a8-4f4b-a132-758c6a030e02 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.606s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.121285] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 
2.756s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.121538] env[62816]: INFO nova.compute.manager [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Migrating [ 1402.121790] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.121940] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.123492] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.375s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.126238] env[62816]: INFO nova.compute.claims [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1402.133118] env[62816]: INFO nova.compute.rpcapi [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1402.133658] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.164416] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787994, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085595} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.165447] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.166586] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415bab43-ee15-4d50-893b-5159eeaa1e9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.200779] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] fb84cb48-d1a1-4eec-adb8-8edc585263df/fb84cb48-d1a1-4eec-adb8-8edc585263df.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.201498] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37afe976-6e33-4326-8520-509c7827dc86 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.229851] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1787996, 'name': CreateVM_Task, 'duration_secs': 0.412887} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.233555] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1402.234350] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1402.234350] env[62816]: value = "task-1787998" [ 1402.234350] env[62816]: _type = "Task" [ 1402.234350] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.234599] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1787995, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.235156] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.235339] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.235937] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1402.236299] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee830a67-3234-45eb-bf29-c16706a01074 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.248692] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787998, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.249088] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1402.249088] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5282c8b1-150f-8ae2-8661-e1b7dda784f5" [ 1402.249088] env[62816]: _type = "Task" [ 1402.249088] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.257575] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5282c8b1-150f-8ae2-8661-e1b7dda784f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.319387] env[62816]: DEBUG oslo_vmware.api [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1787997, 'name': PowerOffVM_Task, 'duration_secs': 0.2437} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.320634] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1402.320634] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1402.320634] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f77625fc-ffb0-4a83-b59e-13609b910334 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.344905] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1402.345259] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1402.346237] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleting the datastore file [datastore1] 7be4c8f8-240c-4a71-93bb-aeb94243d781 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1402.346237] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6df6b2d-2171-4918-af83-d15588574208 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.352815] env[62816]: DEBUG oslo_vmware.api [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1402.352815] env[62816]: value = "task-1788000" [ 1402.352815] env[62816]: _type = "Task" [ 1402.352815] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.364768] env[62816]: DEBUG oslo_vmware.api [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1788000, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.423777] env[62816]: DEBUG oslo_concurrency.lockutils [req-10a21869-bd47-45b7-98a3-05dc2ed9ddf5 req-89f2c747-5217-455c-8de7-9c75a7a36a64 service nova] Releasing lock "refresh_cache-0b10aca0-950b-46f6-8367-5cb9ea7540c8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.612694] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1402.614762] env[62816]: DEBUG nova.network.neutron [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Successfully updated port: f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1402.650408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.650908] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.650908] env[62816]: DEBUG nova.network.neutron [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1402.730515] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1787995, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.881091} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.730882] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 2bc7f973-007d-44bd-aae8-d3b62506efba/2bc7f973-007d-44bd-aae8-d3b62506efba.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1402.731050] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1402.731597] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b5fd047-3cb8-4303-a0de-666cee9fc020 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.745594] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1402.745594] env[62816]: value = "task-1788001" [ 1402.745594] env[62816]: _type = "Task" [ 1402.745594] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.752549] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787998, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.759883] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.765230] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5282c8b1-150f-8ae2-8661-e1b7dda784f5, 'name': SearchDatastore_Task, 'duration_secs': 0.102069} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.766550] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.766550] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1402.766550] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1402.766550] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.766809] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1402.766809] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-79d66de2-81fc-4d28-bb89-c06bca8023a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.775394] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1402.775612] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1402.776455] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57f43dd0-b146-4fdd-8640-09ec5d3b3487 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.781939] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1402.781939] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529b3764-fe21-1afc-14f3-abd1b3291bfb" [ 1402.781939] env[62816]: _type = "Task" [ 1402.781939] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.789873] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529b3764-fe21-1afc-14f3-abd1b3291bfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.865510] env[62816]: DEBUG oslo_vmware.api [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1788000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.423267} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.865510] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1402.865510] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1402.865510] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1402.865510] env[62816]: INFO nova.compute.manager [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1402.865768] env[62816]: DEBUG oslo.service.loopingcall [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.865768] env[62816]: DEBUG nova.compute.manager [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1402.865768] env[62816]: DEBUG nova.network.neutron [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1402.887924] env[62816]: DEBUG nova.network.neutron [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1403.122080] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.123214] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.123214] env[62816]: DEBUG nova.network.neutron [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1403.144392] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.247706] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1787998, 'name': ReconfigVM_Task, 'duration_secs': 0.536822} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.253073] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Reconfigured VM instance instance-0000000e to attach disk [datastore1] fb84cb48-d1a1-4eec-adb8-8edc585263df/fb84cb48-d1a1-4eec-adb8-8edc585263df.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1403.254039] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2eb8cf1d-2ca1-43fc-bd9f-93d252a31cfc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.263326] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1403.263326] env[62816]: value = "task-1788002" [ 1403.263326] env[62816]: _type = "Task" [ 1403.263326] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.263549] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068356} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.263857] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1403.268188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a192114-6e0c-45b3-bdd3-1efca7f60a14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.275759] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788002, 'name': Rename_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.292610] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 2bc7f973-007d-44bd-aae8-d3b62506efba/2bc7f973-007d-44bd-aae8-d3b62506efba.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1403.300780] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c285cbba-ffe5-4453-86f7-36e9a2bc4a8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.319495] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "c6dc008c-6336-4271-9635-a7e0652138e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.319739] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "c6dc008c-6336-4271-9635-a7e0652138e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.324699] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529b3764-fe21-1afc-14f3-abd1b3291bfb, 'name': SearchDatastore_Task, 'duration_secs': 0.024233} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.326780] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1403.326780] env[62816]: value = "task-1788003" [ 1403.326780] env[62816]: _type = "Task" [ 1403.326780] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.326780] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6add5781-fd74-4635-9e10-7aa1d3d97419 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.346412] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1403.346412] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e35b88-19ed-5a26-515d-a737b779b339" [ 1403.346412] env[62816]: _type = "Task" [ 1403.346412] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.346650] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788003, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.358174] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e35b88-19ed-5a26-515d-a737b779b339, 'name': SearchDatastore_Task, 'duration_secs': 0.016112} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.358393] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.358671] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1403.358919] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db976a2f-81bb-43ff-aa4f-77f6fbd711b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.369020] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1403.369020] env[62816]: value = "task-1788004" [ 1403.369020] env[62816]: _type = "Task" [ 1403.369020] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.375632] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.391281] env[62816]: DEBUG nova.network.neutron [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.518147] env[62816]: DEBUG nova.network.neutron [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.567914] env[62816]: DEBUG nova.compute.manager [req-5b132285-3ff9-459e-9d72-24ed13e8613b req-a2c6b20a-e48f-4af8-81a5-03122ff8332b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-vif-plugged-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.568146] env[62816]: DEBUG oslo_concurrency.lockutils [req-5b132285-3ff9-459e-9d72-24ed13e8613b req-a2c6b20a-e48f-4af8-81a5-03122ff8332b service nova] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.568363] env[62816]: DEBUG oslo_concurrency.lockutils [req-5b132285-3ff9-459e-9d72-24ed13e8613b req-a2c6b20a-e48f-4af8-81a5-03122ff8332b service nova] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.568845] env[62816]: DEBUG oslo_concurrency.lockutils [req-5b132285-3ff9-459e-9d72-24ed13e8613b req-a2c6b20a-e48f-4af8-81a5-03122ff8332b service nova] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.568845] env[62816]: DEBUG nova.compute.manager [req-5b132285-3ff9-459e-9d72-24ed13e8613b 
req-a2c6b20a-e48f-4af8-81a5-03122ff8332b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] No waiting events found dispatching network-vif-plugged-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1403.569986] env[62816]: WARNING nova.compute.manager [req-5b132285-3ff9-459e-9d72-24ed13e8613b req-a2c6b20a-e48f-4af8-81a5-03122ff8332b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received unexpected event network-vif-plugged-f2f2e184-1921-455c-b435-44548769245c for instance with vm_state building and task_state spawning. [ 1403.669545] env[62816]: DEBUG nova.network.neutron [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1403.775934] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788002, 'name': Rename_Task, 'duration_secs': 0.164898} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.778719] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1403.780824] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eca25cb-5cdf-4c17-b1c3-e106cdd1ba9e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.786261] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86e7b80b-ad9a-4f1b-bacb-0a3459913f06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.796018] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e4b463-0788-4214-8851-2d09838e2cf5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.798147] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1403.798147] env[62816]: value = "task-1788005" [ 1403.798147] env[62816]: _type = "Task" [ 1403.798147] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.841152] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e34217-7408-486f-bf19-e3a9312b0c37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.847562] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788005, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.853605] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788003, 'name': ReconfigVM_Task, 'duration_secs': 0.417865} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.855044] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 2bc7f973-007d-44bd-aae8-d3b62506efba/2bc7f973-007d-44bd-aae8-d3b62506efba.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1403.855840] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1189c4b7-48c6-4c54-b853-d9c9d044f70d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.859045] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b968d18-494f-4e3a-a584-3ba6477efcd7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.875785] env[62816]: DEBUG nova.compute.provider_tree [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1403.880346] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1403.880346] env[62816]: value = "task-1788006" [ 1403.880346] env[62816]: _type = "Task" [ 1403.880346] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.891249] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788004, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.897827] env[62816]: INFO nova.compute.manager [-] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Took 1.03 seconds to deallocate network for instance. [ 1403.898599] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788006, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.899500] env[62816]: DEBUG nova.network.neutron [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.024629] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.116753] env[62816]: DEBUG nova.compute.manager [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Received event network-changed-de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.116970] env[62816]: DEBUG nova.compute.manager [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Refreshing instance network info cache due to event 
network-changed-de736438-152f-4337-ae73-74024c1cac15. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1404.117211] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] Acquiring lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.117357] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] Acquired lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.117518] env[62816]: DEBUG nova.network.neutron [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Refreshing network info cache for port de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.309315] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788005, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.385140] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817627} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.387161] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1404.387375] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1404.387615] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2e456dd8-6862-4065-8799-108e34fa6c90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.396484] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "455052cc-292a-414c-8c83-bc512c49a197" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.396953] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.397492] env[62816]: DEBUG nova.compute.manager [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1404.398121] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788006, 'name': Rename_Task, 'duration_secs': 0.198203} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.398862] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9f2185-afe1-4ca7-a6a7-2062df8a947b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.403371] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.404954] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.405483] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance network_info: |[{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1404.405750] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-742ace27-88ac-471c-ad08-fdb72367209d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.407399] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1404.407399] env[62816]: value = "task-1788007" [ 1404.407399] env[62816]: _type = "Task" [ 1404.407399] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.407930] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:90:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2f2e184-1921-455c-b435-44548769245c', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1404.420595] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating folder: Project (005f772e517340a0acaac0d61b8262df). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.421932] env[62816]: ERROR nova.scheduler.client.report [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [req-67854d45-f054-4c3a-a749-dc8162d8cdb2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-67854d45-f054-4c3a-a749-dc8162d8cdb2"}]} [ 1404.423066] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.423297] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9221c613-db6b-40d9-b336-433ffdeda82c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.433183] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1404.433183] env[62816]: value = "task-1788008" [ 1404.433183] env[62816]: _type = "Task" [ 1404.433183] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.433511] env[62816]: DEBUG nova.compute.manager [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1404.434085] env[62816]: DEBUG nova.objects.instance [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lazy-loading 'flavor' on Instance uuid 455052cc-292a-414c-8c83-bc512c49a197 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1404.452131] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788007, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.452131] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Created folder: Project (005f772e517340a0acaac0d61b8262df) in parent group-v370905. [ 1404.452131] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating folder: Instances. Parent ref: group-v370956. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1404.452131] env[62816]: DEBUG nova.scheduler.client.report [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1404.455067] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be61bfa1-f601-4239-acfa-57edc137e24c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.460694] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788008, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.468293] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Created folder: Instances in parent group-v370956. [ 1404.468518] env[62816]: DEBUG oslo.service.loopingcall [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.468925] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1404.468925] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-68339d3d-5b7b-4cb0-a801-c1af0f2dc8cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.485158] env[62816]: DEBUG nova.scheduler.client.report [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1404.485390] env[62816]: DEBUG nova.compute.provider_tree [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1404.492504] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1404.492504] env[62816]: value = "task-1788011" [ 1404.492504] env[62816]: _type = "Task" [ 1404.492504] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.497254] env[62816]: DEBUG nova.scheduler.client.report [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1404.505578] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788011, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.517689] env[62816]: DEBUG nova.scheduler.client.report [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1404.587912] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "de33d02f-7e34-4619-a2ed-cda6c54aa030" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.588759] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.588759] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "de33d02f-7e34-4619-a2ed-cda6c54aa030-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1404.588935] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.589499] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.595114] env[62816]: INFO nova.compute.manager [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Terminating instance [ 1404.597377] env[62816]: DEBUG nova.compute.manager [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1404.597600] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1404.598517] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830ed360-60a4-4533-8979-f53803cd4519 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.607015] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.609569] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-594289c3-1b90-45c7-ae26-68cb3ac90a2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.616714] env[62816]: DEBUG oslo_vmware.api [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1404.616714] env[62816]: value = "task-1788012" [ 1404.616714] env[62816]: _type = "Task" [ 1404.616714] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.629767] env[62816]: DEBUG oslo_vmware.api [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1788012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.707219] env[62816]: DEBUG nova.compute.manager [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1404.711328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77097117-414b-4838-86f7-e30a66688c9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.813611] env[62816]: DEBUG oslo_vmware.api [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788005, 'name': PowerOnVM_Task, 'duration_secs': 0.884697} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.813937] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1404.814593] env[62816]: INFO nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Took 11.78 seconds to spawn the instance on the hypervisor. [ 1404.814960] env[62816]: DEBUG nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1404.816135] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e500f4ee-f81d-40fb-bee7-5acc55150035 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.936259] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788007, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14345} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.936687] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1404.937839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411be01a-4d34-46ad-b697-04ac5f05c0f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.952330] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1404.961701] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-109301ac-842b-4e62-9d75-8ecaad217707 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.973852] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} 
[ 1404.978293] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-352255bf-1a4c-49d2-b283-8de376f5fb3f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.000939] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788008, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.006052] env[62816]: DEBUG oslo_vmware.api [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1405.006052] env[62816]: value = "task-1788013" [ 1405.006052] env[62816]: _type = "Task" [ 1405.006052] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.013917] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788011, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.014279] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1405.014279] env[62816]: value = "task-1788014" [ 1405.014279] env[62816]: _type = "Task" [ 1405.014279] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.024669] env[62816]: DEBUG oslo_vmware.api [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788013, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.031015] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788014, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.066753] env[62816]: DEBUG nova.network.neutron [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updated VIF entry in instance network info cache for port de736438-152f-4337-ae73-74024c1cac15. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1405.066753] env[62816]: DEBUG nova.network.neutron [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updating instance_info_cache with network_info: [{"id": "de736438-152f-4337-ae73-74024c1cac15", "address": "fa:16:3e:af:7e:d2", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde736438-15", "ovs_interfaceid": "de736438-152f-4337-ae73-74024c1cac15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.103551] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec79915b-c33c-467d-b8a1-9a982c994497 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.112044] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bba0291-01c9-4060-b17e-724b38d91941 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.147435] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b40058-dacc-4fb0-b0bd-5c1ce6b58fec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.154292] env[62816]: DEBUG oslo_vmware.api [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1788012, 'name': PowerOffVM_Task, 'duration_secs': 0.222988} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.154981] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.155177] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1405.155428] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ec5b8e9-1ce0-46ad-b766-ad0be9d6ec15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.161074] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9bd924-440d-4f2e-9969-e8d5c6170291 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.174989] env[62816]: DEBUG nova.compute.provider_tree [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1405.225435] env[62816]: INFO nova.compute.manager [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] instance snapshotting [ 1405.228658] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8407c6b9-0ffa-4126-8712-84a5d9021221 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.249080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15d40be8-4eb8-4da8-ba35-eb23034993aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.345073] env[62816]: INFO nova.compute.manager [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Took 23.14 seconds to build instance. [ 1405.454044] env[62816]: DEBUG oslo_vmware.api [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788008, 'name': PowerOnVM_Task, 'duration_secs': 0.929949} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.458068] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.458068] env[62816]: INFO nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Took 7.42 seconds to spawn the instance on the hypervisor. [ 1405.458068] env[62816]: DEBUG nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1405.458068] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74065040-3663-4a08-a51d-504d65c05b90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.518815] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788011, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.532178] env[62816]: DEBUG oslo_vmware.api [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788013, 'name': PowerOffVM_Task, 'duration_secs': 0.3826} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.532906] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1405.533116] env[62816]: DEBUG nova.compute.manager [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1405.533895] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd95b628-85d2-4273-b108-1c1ce4c229ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.545185] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788014, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.546803] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2c5ce1-d616-4519-ae7d-3733fb76ef4b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.574702] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4d6d7fd-5212-4aca-951e-0625f42806f2 req-6c3c9b17-716d-4754-b51d-9b4c07adcfda service nova] Releasing lock "refresh_cache-f6ddaab3-d420-4ee4-bf75-486228826635" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.574702] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1405.710953] env[62816]: DEBUG nova.scheduler.client.report [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1405.711279] env[62816]: DEBUG nova.compute.provider_tree [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 42 to 43 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1405.711420] env[62816]: DEBUG nova.compute.provider_tree [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1405.760707] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1405.761113] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5a51aa98-4944-4c40-85e2-b8fcbbbe6960 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.769681] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1405.769681] env[62816]: value = "task-1788016" [ 1405.769681] env[62816]: _type = "Task" [ 1405.769681] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.779077] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788016, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.850339] env[62816]: DEBUG oslo_concurrency.lockutils [None req-580da89b-0b1c-468f-abe1-03e0a2030bf5 tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.891s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.975522] env[62816]: INFO nova.compute.manager [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Took 23.71 seconds to build instance. [ 1406.014690] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788011, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.028537] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788014, 'name': ReconfigVM_Task, 'duration_secs': 0.968183} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.028988] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1406.029836] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-788b7f2a-5643-4116-b4f7-2a64819fa690 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.035728] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1406.035728] env[62816]: value = "task-1788017" [ 1406.035728] env[62816]: _type = "Task" [ 1406.035728] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.045705] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788017, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.061794] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d3006dc4-ecf6-4fd6-ba47-ed3f90e6db60 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.665s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.082175] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1406.082536] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01197322-fadb-423e-8ecd-8f2de13c167d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.092133] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1406.092133] env[62816]: value = "task-1788018" [ 1406.092133] env[62816]: _type = "Task" [ 1406.092133] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.100334] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788018, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.158986] env[62816]: DEBUG nova.compute.manager [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-changed-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1406.159211] env[62816]: DEBUG nova.compute.manager [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Refreshing instance network info cache due to event network-changed-f2f2e184-1921-455c-b435-44548769245c. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1406.159414] env[62816]: DEBUG oslo_concurrency.lockutils [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.159553] env[62816]: DEBUG oslo_concurrency.lockutils [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.159906] env[62816]: DEBUG nova.network.neutron [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Refreshing network info cache for port f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1406.218020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.094s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.218020] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1406.224036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.454s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.224036] env[62816]: DEBUG nova.objects.instance [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lazy-loading 'resources' on Instance uuid 666d5105-ee2e-4691-b13c-bd7feb045959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1406.289362] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788016, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.354247] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1406.482411] env[62816]: DEBUG oslo_concurrency.lockutils [None req-56fb9ae4-481b-456d-888e-8041718c6adc tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "2bc7f973-007d-44bd-aae8-d3b62506efba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.231s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.517292] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788011, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.547341] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788017, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.601960] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788018, 'name': PowerOffVM_Task, 'duration_secs': 0.21825} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.602282] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1406.602473] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1406.635465] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1406.635718] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1406.635906] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Deleting the datastore file [datastore1] de33d02f-7e34-4619-a2ed-cda6c54aa030 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1406.636193] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6579ceea-6e38-4032-ae22-8ebda467b2c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.642736] env[62816]: DEBUG oslo_vmware.api [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for the task: (returnval){ [ 1406.642736] env[62816]: value = "task-1788019" [ 1406.642736] env[62816]: _type = "Task" [ 1406.642736] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.651605] env[62816]: DEBUG oslo_vmware.api [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1788019, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.729203] env[62816]: DEBUG nova.compute.utils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1406.732164] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1406.732164] env[62816]: DEBUG nova.network.neutron [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1406.781325] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788016, 'name': CreateSnapshot_Task, 'duration_secs': 0.938205} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.784050] env[62816]: DEBUG nova.policy [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf0d42b1de8d4053a4424a54769ef282', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '48b0cb4d3e7844aea904384f8677bbc0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1406.786071] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1406.791140] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7748bf20-bd56-4857-9302-12624315d915 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.876513] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.984849] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 
tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1407.026442] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788011, 'name': CreateVM_Task, 'duration_secs': 2.189637} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.026442] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1407.026442] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.026442] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.026720] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1407.026871] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad601756-0d28-4907-ac2a-d1856040c0cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.037405] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1407.037405] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52204946-5e4d-8b5d-963d-e056a2810451" [ 1407.037405] env[62816]: _type = "Task" [ 1407.037405] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.059057] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788017, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.059338] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52204946-5e4d-8b5d-963d-e056a2810451, 'name': SearchDatastore_Task, 'duration_secs': 0.010135} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.062193] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.062428] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1407.062656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1407.062799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1407.062973] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1407.064666] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c5dcafb-fafd-4c09-9f71-94aa0680fdb6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.073926] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1407.073926] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1407.079846] env[62816]: DEBUG nova.network.neutron [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updated VIF entry in instance network info cache for port f2f2e184-1921-455c-b435-44548769245c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1407.080041] env[62816]: DEBUG nova.network.neutron [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.081979] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da46c781-b512-49e7-8420-8af071471836 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.093266] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1407.093266] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523b60cd-3fd6-7298-2e5f-7b3f5c8f7e5a" [ 1407.093266] env[62816]: _type = "Task" [ 1407.093266] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.101506] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523b60cd-3fd6-7298-2e5f-7b3f5c8f7e5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.111720] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1407.111720] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1407.111720] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1407.111720] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1407.112225] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1407.112225] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1407.112225] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1407.112225] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1407.112225] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1407.112467] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1407.112467] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1407.117005] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ffb2038a-c7dd-4707-a3f7-cb1e2d8d20c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.139706] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1407.139706] env[62816]: value = "task-1788020" [ 1407.139706] env[62816]: _type = "Task" [ 1407.139706] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.153446] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788020, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.158249] env[62816]: DEBUG oslo_vmware.api [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Task: {'id': task-1788019, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189151} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.158764] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1407.158960] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1407.159326] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1407.159579] env[62816]: INFO nova.compute.manager [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Took 2.56 seconds to destroy the instance on the hypervisor. [ 1407.159872] env[62816]: DEBUG oslo.service.loopingcall [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1407.160468] env[62816]: DEBUG nova.compute.manager [-] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1407.161427] env[62816]: DEBUG nova.network.neutron [-] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1407.238304] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1407.247111] env[62816]: DEBUG nova.network.neutron [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Successfully created port: 6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1407.315536] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1407.319129] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5f39ca45-ea33-4766-85d2-b39097382a2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.330372] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1407.330372] env[62816]: value = "task-1788021" [ 1407.330372] env[62816]: _type = "Task" [ 1407.330372] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.340317] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788021, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.466217] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce322f3a-5271-417d-b258-f4337fc7f3fe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.475029] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1036e1bc-e1cf-48fd-ac33-d25f479a4009 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.513350] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eab822d-989e-4d9d-80d8-600e9f671daf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.521824] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6887437e-4a2c-4477-b86c-84551d499cac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.529566] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.542497] env[62816]: DEBUG nova.compute.provider_tree [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1407.552350] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788017, 'name': Rename_Task, 'duration_secs': 1.047183} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.552638] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1407.552885] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81aa3da5-40e3-4296-a676-c63dd33c6357 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.560392] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1407.560392] env[62816]: value = "task-1788022" [ 1407.560392] env[62816]: _type = "Task" [ 1407.560392] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.568611] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788022, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.589815] env[62816]: DEBUG oslo_concurrency.lockutils [req-17e3b20b-3a7c-4c41-8dcc-a3136941bde2 req-b67cddf0-b6f5-4edf-9639-4d720078b8d3 service nova] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.603340] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523b60cd-3fd6-7298-2e5f-7b3f5c8f7e5a, 'name': SearchDatastore_Task, 'duration_secs': 0.023003} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.604885] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2140c940-9f43-4468-9fd1-27fd67ccf8cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.611204] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1407.611204] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d30f0e-344c-b7df-ceb2-68a9e48de6ce" [ 1407.611204] env[62816]: _type = "Task" [ 1407.611204] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.623559] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d30f0e-344c-b7df-ceb2-68a9e48de6ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.649869] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788020, 'name': ReconfigVM_Task, 'duration_secs': 0.214502} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.650282] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1407.752308] env[62816]: INFO nova.virt.block_device [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Booting with volume 76167f57-102e-45d9-8256-5434bbce481e at /dev/sda [ 1407.806278] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c8b21e7f-39b1-4174-a04a-011058fab0b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.815736] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a58fb5-634a-4b57-90c9-262ea753363d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.840177] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788021, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.854598] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d9b8afa-b7e0-48fb-88e4-a47f6e0b7d75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.864791] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086724f4-7e95-4abc-98b4-dc8675f5d4c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.876408] env[62816]: DEBUG nova.compute.manager [None req-9b4b14c9-8789-4d39-811f-94306942f30a tempest-ServerDiagnosticsV248Test-1306065734 tempest-ServerDiagnosticsV248Test-1306065734-project-admin] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.877552] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe48bdf8-9d48-4806-ab20-421cec8c4d43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.885159] env[62816]: INFO nova.compute.manager [None req-9b4b14c9-8789-4d39-811f-94306942f30a tempest-ServerDiagnosticsV248Test-1306065734 tempest-ServerDiagnosticsV248Test-1306065734-project-admin] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Retrieving diagnostics [ 1407.885986] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5047e91-7b78-4ebc-a9ab-07d362152b4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.926192] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182f2a5f-9635-47db-89ac-a1ffcdb105af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.936567] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77a6ff3e-a77b-4702-b8c9-87329c4a939d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.950934] env[62816]: DEBUG nova.virt.block_device [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updating existing volume attachment record: 09532d56-5a40-445f-8d7d-dfc8d55a9da7 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1407.955482] env[62816]: DEBUG nova.network.neutron [-] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1408.072322] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788022, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.088035] env[62816]: DEBUG nova.scheduler.client.report [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 43 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1408.088035] env[62816]: DEBUG nova.compute.provider_tree [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 43 to 44 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1408.088035] env[62816]: DEBUG nova.compute.provider_tree [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1408.126035] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d30f0e-344c-b7df-ceb2-68a9e48de6ce, 'name': SearchDatastore_Task, 'duration_secs': 0.010123} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.126035] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1408.126035] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1408.126035] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f19d66a-f429-42d1-a7a7-56a24f9b0b44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.132360] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1408.132360] env[62816]: value = "task-1788023" [ 1408.132360] env[62816]: _type = "Task" [ 1408.132360] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.140801] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788023, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.159652] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1408.160014] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1408.160212] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1408.160370] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1408.160543] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1408.160746] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1408.160975] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1408.161147] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1408.161313] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1408.161472] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1408.161672] env[62816]: DEBUG nova.virt.hardware [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1408.167702] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Reconfiguring VM instance instance-00000001 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1408.168006] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7abc2827-7746-4268-beba-a605e7b04b0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.188477] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1408.188477] env[62816]: value = "task-1788024" [ 1408.188477] env[62816]: _type = "Task" [ 1408.188477] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.198011] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788024, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.231789] env[62816]: DEBUG nova.compute.manager [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1408.232660] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4a0228-6f78-476d-9267-23c3a597c046 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.341874] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788021, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.458394] env[62816]: INFO nova.compute.manager [-] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Took 1.30 seconds to deallocate network for instance. 
[ 1408.573952] env[62816]: DEBUG oslo_vmware.api [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788022, 'name': PowerOnVM_Task, 'duration_secs': 0.880054} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.574028] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.574230] env[62816]: DEBUG nova.compute.manager [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1408.575032] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e6c350-67ea-4b3b-9fbf-17b6d67210b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.590896] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.369s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1408.595319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.717s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1408.598342] env[62816]: INFO nova.compute.claims [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1408.626988] env[62816]: INFO nova.scheduler.client.report [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Deleted allocations for instance 666d5105-ee2e-4691-b13c-bd7feb045959 [ 1408.650602] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788023, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.698468] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788024, 'name': ReconfigVM_Task, 'duration_secs': 0.365568} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.699058] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Reconfigured VM instance instance-00000001 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1408.699564] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ab5035-6a8f-40ec-a0ad-178e2315b149 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.729315] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd/3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1408.729827] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04c3e963-d76b-4ec6-b580-4625f6edb03e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.748181] env[62816]: INFO nova.compute.manager [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] instance snapshotting [ 1408.748181] env[62816]: WARNING nova.compute.manager [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1408.751404] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738e49b8-fb53-4b30-a2d8-0ff09469a8b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.757089] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1408.757089] env[62816]: value = "task-1788025" [ 1408.757089] env[62816]: _type = "Task" [ 1408.757089] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.776538] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1695c1c5-04b9-467c-ba4f-ccc7e23eee18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.783790] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788025, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.844256] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788021, 'name': CloneVM_Task, 'duration_secs': 1.358639} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.844256] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Created linked-clone VM from snapshot [ 1408.845114] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf40aa4-39c0-4efe-a4b2-1cae64eab171 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.853853] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Uploading image 4ef107ae-3677-4109-985c-4301290811e8 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1408.872494] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1408.872795] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8fbc0fae-ce44-4129-9fd8-e88a35898139 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.879861] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1408.879861] env[62816]: value = "task-1788026" [ 1408.879861] env[62816]: _type = "Task" [ 1408.879861] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.890357] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788026, 'name': Destroy_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.966227] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.094569] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.134292] env[62816]: DEBUG nova.network.neutron [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Successfully updated port: 6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1409.147566] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e559a110-77ee-4941-8e15-8bb3c1b2bd5e tempest-ServerExternalEventsTest-1066914606 tempest-ServerExternalEventsTest-1066914606-project-member] Lock "666d5105-ee2e-4691-b13c-bd7feb045959" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.882s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.152952] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788023, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.756462} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.154597] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1409.154832] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1409.156453] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a3012d6-cfa8-406a-aeed-99630b26568f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.166361] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1409.166361] env[62816]: value = "task-1788027" [ 1409.166361] env[62816]: _type = "Task" [ 1409.166361] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.176050] env[62816]: DEBUG nova.compute.manager [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Received event network-changed-b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1409.176780] env[62816]: DEBUG nova.compute.manager [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Refreshing instance network info cache due to event network-changed-b6ecd005-0fec-4275-91b7-0814f3514b40. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1409.176780] env[62816]: DEBUG oslo_concurrency.lockutils [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] Acquiring lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.176780] env[62816]: DEBUG oslo_concurrency.lockutils [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] Acquired lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.176780] env[62816]: DEBUG nova.network.neutron [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Refreshing network info cache for port b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1409.187116] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.267150] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788025, 'name': ReconfigVM_Task, 'duration_secs': 0.466468} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.267537] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd/3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1409.267885] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1409.294172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1409.294516] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c4a5f02b-0dc5-4ba7-98c7-49d61a4a2377 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.309610] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 
tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1409.309610] env[62816]: value = "task-1788028" [ 1409.309610] env[62816]: _type = "Task" [ 1409.309610] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.320397] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788028, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.391452] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788026, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.544971] env[62816]: DEBUG nova.compute.manager [req-2b470590-a6a0-4c68-8cda-d97f6944266a req-cf6276fc-1da0-451a-89b2-e51d499a456f service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Received event network-vif-plugged-6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1409.545204] env[62816]: DEBUG oslo_concurrency.lockutils [req-2b470590-a6a0-4c68-8cda-d97f6944266a req-cf6276fc-1da0-451a-89b2-e51d499a456f service nova] Acquiring lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.545411] env[62816]: DEBUG oslo_concurrency.lockutils [req-2b470590-a6a0-4c68-8cda-d97f6944266a req-cf6276fc-1da0-451a-89b2-e51d499a456f service nova] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.545620] env[62816]: DEBUG oslo_concurrency.lockutils [req-2b470590-a6a0-4c68-8cda-d97f6944266a req-cf6276fc-1da0-451a-89b2-e51d499a456f service nova] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1409.545740] env[62816]: DEBUG nova.compute.manager [req-2b470590-a6a0-4c68-8cda-d97f6944266a req-cf6276fc-1da0-451a-89b2-e51d499a456f service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] No waiting events found dispatching network-vif-plugged-6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1409.545952] env[62816]: WARNING nova.compute.manager [req-2b470590-a6a0-4c68-8cda-d97f6944266a req-cf6276fc-1da0-451a-89b2-e51d499a456f service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Received unexpected event network-vif-plugged-6b060db8-dee6-465b-8fb0-980f49a5e433 for instance with vm_state building and task_state block_device_mapping. 
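The ExtendVirtualDisk_Task, CreateSnapshot_Task and Destroy_Task entries above all follow the same poll-until-complete pattern: a vCenter task is invoked, the API then repeatedly polls its progress ("progress is 0%", "progress is 33%", ...) until it reports success with a duration_secs, or an error. A minimal standalone sketch of that pattern (not the oslo.vmware implementation; poll_task_info and the dict it returns are assumed stand-ins for illustration):

import time

def wait_for_task(poll_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it completes.

    poll_task_info(task_id) is an assumed callable returning a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str|None}.
    Returns the elapsed seconds on success (the duration_secs seen in the log).
    """
    start = time.monotonic()
    while True:
        info = poll_task_info(task_id)
        if info['state'] == 'success':
            return time.monotonic() - start
        if info['state'] == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # Equivalent of the "Task: {...} progress is N%" DEBUG lines above.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
        time.sleep(interval)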
[ 1409.636200] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquiring lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1409.636478] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquired lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1409.636524] env[62816]: DEBUG nova.network.neutron [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1409.682246] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082328} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.689586] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1409.689586] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de43f6ae-8b83-42ef-babf-770ae264c880 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.719095] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1409.723047] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6abcf158-ae84-48bd-a815-2593a5d20b3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.749536] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1409.749536] env[62816]: value = "task-1788029" [ 1409.749536] env[62816]: _type = "Task" [ 1409.749536] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.760398] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.785719] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc063a97-d26c-49f3-9ecd-9f993e528660 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.814186] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-837c4af4-bcb6-40c1-8350-3e3f2d2535a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.828251] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788028, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.848247] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1409.891847] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788026, 'name': Destroy_Task, 'duration_secs': 0.626558} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.894717] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Destroyed the VM [ 1409.895073] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1409.895576] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e89b99f6-cb30-4936-b45c-fe30f1605e5d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.904193] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1409.904193] env[62816]: value = "task-1788030" [ 1409.904193] env[62816]: _type = "Task" [ 1409.904193] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.916228] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788030, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.085683] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1410.085683] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1410.085683] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1410.085683] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1410.090378] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1410.090378] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1410.090378] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1410.090378] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 
tempest-ServersTestBootFromVolume-2087831775-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1410.090378] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1410.090549] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1410.090549] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1410.090549] env[62816]: DEBUG nova.virt.hardware [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1410.090549] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa8d2373-fb2e-44bb-8119-63a22fdd85ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.095904] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd3aa52d-1431-4ce1-a20f-857d01e29956 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.189016] env[62816]: DEBUG nova.network.neutron [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updated VIF entry in instance network info cache for port b6ecd005-0fec-4275-91b7-0814f3514b40. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1410.189448] env[62816]: DEBUG nova.network.neutron [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updating instance_info_cache with network_info: [{"id": "b6ecd005-0fec-4275-91b7-0814f3514b40", "address": "fa:16:3e:ae:6d:f6", "network": {"id": "bf0dc49e-afa3-4f8d-a85b-51f2f60b1f0c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-155605247-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "865c2861f9a745f59e7ed2bc0d2ac48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d88b750a-0e7d-4f16-8bd5-8e6d5743b720", "external-id": "nsx-vlan-transportzone-715", "segmentation_id": 715, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6ecd005-0f", "ovs_interfaceid": "b6ecd005-0fec-4275-91b7-0814f3514b40", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.194486] env[62816]: DEBUG nova.network.neutron [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1410.260251] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788029, 'name': ReconfigVM_Task, 'duration_secs': 0.405123} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.265199] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1410.266069] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa9182e7-8578-49e5-af75-c471b40015ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.272830] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1410.272830] env[62816]: value = "task-1788031" [ 1410.272830] env[62816]: _type = "Task" [ 1410.272830] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.284207] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788031, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.315285] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb7cc9e-674c-452c-b4d3-761608672987 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.328685] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-109420c5-31e7-4de7-9e42-cd66460f17d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.331869] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788028, 'name': CreateSnapshot_Task, 'duration_secs': 0.877196} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.332124] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1410.333213] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79fe9bb-a576-409c-96ab-dfaff4326c96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.366258] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768a9ea2-1d0b-4ac6-b748-f871910528f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.381427] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c898be2-0c8d-4a8d-b3c6-9eefb6d4c22d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.397793] env[62816]: DEBUG nova.compute.provider_tree [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1410.413854] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788030, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.426073] env[62816]: DEBUG nova.network.neutron [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updating instance_info_cache with network_info: [{"id": "6b060db8-dee6-465b-8fb0-980f49a5e433", "address": "fa:16:3e:25:17:f5", "network": {"id": "dc60bf1f-0f94-4aba-8b9f-40ef9e49b444", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1385986515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48b0cb4d3e7844aea904384f8677bbc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27681cba-790d-451e-9d12-d179871f375a", "external-id": "cl2-zone-147", "segmentation_id": 147, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b060db8-de", "ovs_interfaceid": "6b060db8-dee6-465b-8fb0-980f49a5e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1410.434267] env[62816]: DEBUG nova.network.neutron [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Port c37f87b0-7404-4bad-89e7-5ebbccb43aad binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1410.700500] env[62816]: DEBUG oslo_concurrency.lockutils [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] Releasing lock "refresh_cache-fb84cb48-d1a1-4eec-adb8-8edc585263df" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.700788] env[62816]: DEBUG nova.compute.manager [req-d11f342d-51bf-4b3f-8839-1787ebd811ba req-0b49dbb1-e7b1-43ff-a3c9-4d5972ab3a30 service nova] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Received event network-vif-deleted-c8a5f26b-46d0-41b0-b233-6fb55f960d71 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1410.787558] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788031, 'name': Rename_Task, 'duration_secs': 0.350128} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.788110] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1410.788896] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d7a4ecb-b616-4b69-b7d0-ded3c0bd404e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.797022] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1410.797022] env[62816]: value = "task-1788032" [ 1410.797022] env[62816]: _type = "Task" [ 1410.797022] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.806776] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.886212] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1410.886212] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b1827a73-6737-4165-8e23-ca940d912f94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.895580] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1410.895580] env[62816]: value = "task-1788033" [ 1410.895580] env[62816]: _type = "Task" [ 1410.895580] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.905137] env[62816]: DEBUG nova.scheduler.client.report [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1410.908317] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788033, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.918910] env[62816]: DEBUG oslo_vmware.api [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788030, 'name': RemoveSnapshot_Task, 'duration_secs': 0.913849} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.918910] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1410.928882] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Releasing lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1410.932020] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Instance network_info: |[{"id": "6b060db8-dee6-465b-8fb0-980f49a5e433", "address": "fa:16:3e:25:17:f5", "network": {"id": "dc60bf1f-0f94-4aba-8b9f-40ef9e49b444", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1385986515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48b0cb4d3e7844aea904384f8677bbc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27681cba-790d-451e-9d12-d179871f375a", "external-id": 
"cl2-zone-147", "segmentation_id": 147, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b060db8-de", "ovs_interfaceid": "6b060db8-dee6-465b-8fb0-980f49a5e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1410.932233] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:17:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27681cba-790d-451e-9d12-d179871f375a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b060db8-dee6-465b-8fb0-980f49a5e433', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1410.942289] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Creating folder: Project (48b0cb4d3e7844aea904384f8677bbc0). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1410.953297] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f8bc9b-91d2-4ef9-8db3-249e204ef7ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.966164] env[62816]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1410.966399] env[62816]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62816) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1410.967188] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Folder already exists: Project (48b0cb4d3e7844aea904384f8677bbc0). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1410.967796] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Creating folder: Instances. Parent ref: group-v370906. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1410.967796] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ffcb4bb-ef5b-4d02-a463-bbc85b1ab4d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.983861] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Created folder: Instances in parent group-v370906. 
[ 1410.984141] env[62816]: DEBUG oslo.service.loopingcall [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1410.984545] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1410.984910] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3dc773d-a5f5-4b20-99b2-2a523a70f0e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.006425] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1411.006425] env[62816]: value = "task-1788036" [ 1411.006425] env[62816]: _type = "Task" [ 1411.006425] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.015705] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.311967] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788032, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.406781] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788033, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.413132] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.413642] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1411.417703] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.948s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.418863] env[62816]: INFO nova.compute.claims [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1411.429247] env[62816]: WARNING nova.compute.manager [None req-3898e8c3-f5bf-44f3-8f8c-1b5035b3a6c7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Image not found during snapshot: nova.exception.ImageNotFound: Image 4ef107ae-3677-4109-985c-4301290811e8 could not be found. [ 1411.475505] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.475739] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1411.475820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1411.477945] env[62816]: INFO nova.compute.manager [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Rebuilding instance [ 1411.527120] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.539497] env[62816]: DEBUG nova.compute.manager [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1411.541027] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8afec31-cfb8-44e4-9202-ce686aa51327 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.812266] env[62816]: DEBUG oslo_vmware.api [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788032, 'name': PowerOnVM_Task, 'duration_secs': 0.741057} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.812717] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1411.812945] env[62816]: INFO nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Took 10.42 seconds to spawn the instance on the hypervisor. [ 1411.813155] env[62816]: DEBUG nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1411.813944] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31b942ca-1377-45b0-999e-376fc176c977 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.908512] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788033, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.915760] env[62816]: DEBUG nova.compute.manager [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Received event network-changed-6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1411.915933] env[62816]: DEBUG nova.compute.manager [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Refreshing instance network info cache due to event network-changed-6b060db8-dee6-465b-8fb0-980f49a5e433. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1411.916186] env[62816]: DEBUG oslo_concurrency.lockutils [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] Acquiring lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1411.916331] env[62816]: DEBUG oslo_concurrency.lockutils [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] Acquired lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1411.916489] env[62816]: DEBUG nova.network.neutron [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Refreshing network info cache for port 6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1411.918534] env[62816]: DEBUG nova.compute.utils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1411.921506] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1411.924976] env[62816]: DEBUG nova.network.neutron [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1412.009149] env[62816]: DEBUG nova.policy [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cafa262bf2b425c8bd63595e05462c4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f6d8f496c30b49198350d4cf28b51564', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1412.028790] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.052962] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1412.052962] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0696f1e2-cdd2-454b-8a4d-6a979ccc3fee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.062252] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1412.062252] env[62816]: value = "task-1788037" [ 1412.062252] env[62816]: _type = "Task" [ 1412.062252] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.075737] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.338448] env[62816]: INFO nova.compute.manager [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Took 29.95 seconds to build instance. [ 1412.410574] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788033, 'name': CloneVM_Task} progress is 95%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.428669] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1412.527211] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.572653] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788037, 'name': PowerOffVM_Task, 'duration_secs': 0.13656} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.572916] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1412.573162] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1412.573966] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a770f5fb-67e1-45b1-9dd9-b23e9073c570 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.587592] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1412.588627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.588801] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.588971] env[62816]: DEBUG nova.network.neutron [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1412.590127] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85332b01-3653-417d-92ec-3be6f83df752 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.616963] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1412.617234] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1412.617441] env[62816]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Deleting the datastore file [datastore1] 66745316-2735-4c49-b1a2-f9e547211761 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1412.617703] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4693e621-8de4-4abe-a51e-fa87638d091e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.630843] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1412.630843] env[62816]: value = "task-1788039" [ 1412.630843] env[62816]: _type = "Task" [ 1412.630843] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.641081] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.748898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "927badc2-decf-49af-b2c0-d95b471272c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.748898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "927badc2-decf-49af-b2c0-d95b471272c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.748898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "927badc2-decf-49af-b2c0-d95b471272c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1412.748898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "927badc2-decf-49af-b2c0-d95b471272c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1412.749213] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] 
Lock "927badc2-decf-49af-b2c0-d95b471272c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.751400] env[62816]: INFO nova.compute.manager [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Terminating instance [ 1412.753791] env[62816]: DEBUG nova.compute.manager [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1412.754013] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1412.755956] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba4a208-7744-45f6-9ee4-469f20a0a690 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.764699] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1412.769137] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a10f04d-a2cd-4044-bf9b-e9ff31253e37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.778823] env[62816]: DEBUG oslo_vmware.api [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1412.778823] env[62816]: value = "task-1788040" [ 1412.778823] env[62816]: _type = "Task" [ 1412.778823] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.792073] env[62816]: DEBUG oslo_vmware.api [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788040, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.844638] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a357fed-1efe-4a71-9e22-54f46fabc0c8 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.415s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1412.892385] env[62816]: DEBUG nova.network.neutron [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Successfully created port: ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1412.913268] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788033, 'name': CloneVM_Task, 'duration_secs': 1.6674} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.917239] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Created linked-clone VM from snapshot [ 1412.918796] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d65e0b-a490-468f-b989-3fa188b9bf00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.927639] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Uploading image e97c3ce6-ef84-42c7-be27-2e8035941732 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1412.969235] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1412.969235] env[62816]: value = "vm-370962" [ 1412.969235] env[62816]: _type = "VirtualMachine" [ 1412.969235] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1412.969440] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b1cbbe24-1707-4575-b244-fcbfa552140b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.980605] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease: (returnval){ [ 1412.980605] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a24a52-8fc4-59ed-8255-0bb0844f4e3f" [ 1412.980605] env[62816]: _type = "HttpNfcLease" [ 1412.980605] env[62816]: } obtained for exporting VM: (result){ [ 1412.980605] env[62816]: value = "vm-370962" [ 1412.980605] env[62816]: _type = "VirtualMachine" [ 1412.980605] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1412.981065] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the lease: (returnval){ [ 1412.981065] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a24a52-8fc4-59ed-8255-0bb0844f4e3f" [ 1412.981065] env[62816]: _type = "HttpNfcLease" [ 1412.981065] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1412.989072] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1412.989072] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a24a52-8fc4-59ed-8255-0bb0844f4e3f" [ 1412.989072] env[62816]: _type = "HttpNfcLease" [ 1412.989072] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1413.029217] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.083018] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-051f0352-9508-402d-8a3c-2697d79c09d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.089862] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813a436e-a79b-469c-90e0-84fc53655377 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.128161] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3b96ae-a447-4bdf-be33-9b7902f8a710 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.145093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c48e36-cb8a-4f0f-8057-92dd03c5cc4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.148947] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169703} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.149364] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1413.149701] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1413.150028] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1413.163458] env[62816]: DEBUG nova.compute.provider_tree [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.291495] env[62816]: DEBUG oslo_vmware.api [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788040, 'name': PowerOffVM_Task, 'duration_secs': 0.274504} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.291803] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1413.291977] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1413.292261] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e0a3189-bdad-497f-8355-bb4492a5863f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.349488] env[62816]: DEBUG nova.network.neutron [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updated VIF entry in instance network info cache for port 6b060db8-dee6-465b-8fb0-980f49a5e433. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1413.349796] env[62816]: DEBUG nova.network.neutron [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updating instance_info_cache with network_info: [{"id": "6b060db8-dee6-465b-8fb0-980f49a5e433", "address": "fa:16:3e:25:17:f5", "network": {"id": "dc60bf1f-0f94-4aba-8b9f-40ef9e49b444", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1385986515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48b0cb4d3e7844aea904384f8677bbc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27681cba-790d-451e-9d12-d179871f375a", "external-id": "cl2-zone-147", "segmentation_id": 147, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b060db8-de", "ovs_interfaceid": "6b060db8-dee6-465b-8fb0-980f49a5e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.354903] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1413.384601] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1413.384923] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1413.385097] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleting the datastore file [datastore1] 927badc2-decf-49af-b2c0-d95b471272c9 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1413.385368] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b224467-2d5a-4c91-ab34-9bee7c9f7029 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.392217] env[62816]: DEBUG oslo_vmware.api [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1413.392217] env[62816]: value = "task-1788043" [ 1413.392217] env[62816]: _type = "Task" [ 1413.392217] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.401962] env[62816]: DEBUG oslo_vmware.api [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788043, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.438565] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1413.469232] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1413.469491] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1413.469645] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1413.469828] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1413.469971] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1413.470367] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1413.470628] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1413.470791] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1413.470961] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1413.471147] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1413.471326] env[62816]: DEBUG nova.virt.hardware [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1413.472555] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cac542c-0e1c-4cba-9c83-05facace6214 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.488170] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d389bb19-e3aa-48df-9c11-ad029997758e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.495805] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1413.495805] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a24a52-8fc4-59ed-8255-0bb0844f4e3f" [ 1413.495805] env[62816]: _type = "HttpNfcLease" [ 1413.495805] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1413.505542] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1413.505542] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a24a52-8fc4-59ed-8255-0bb0844f4e3f" [ 1413.505542] env[62816]: _type = "HttpNfcLease" [ 1413.505542] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1413.506586] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de499982-3005-43e9-ae4c-19856a5ee6d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.515358] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd02af-631d-aad2-7cee-df1adeb022cd/disk-0.vmdk from lease info. 
{{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1413.515560] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd02af-631d-aad2-7cee-df1adeb022cd/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1413.588520] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.668680] env[62816]: DEBUG nova.scheduler.client.report [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1413.684618] env[62816]: DEBUG nova.network.neutron [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.698031] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-21c09ba9-f150-4623-85df-92c3c4197e83 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.853211] env[62816]: DEBUG oslo_concurrency.lockutils [req-af115267-0e93-46f8-a71c-ce838866bdf6 req-0bea706f-25b3-4b95-9436-ee2826478efa service nova] Releasing lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" 
{{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.902043] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.905475] env[62816]: DEBUG oslo_vmware.api [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788043, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155867} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.905805] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1413.906029] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1413.906257] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1413.906896] env[62816]: INFO nova.compute.manager [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1413.906896] env[62816]: DEBUG oslo.service.loopingcall [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1413.906896] env[62816]: DEBUG nova.compute.manager [-] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1413.907073] env[62816]: DEBUG nova.network.neutron [-] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1414.031084] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788036, 'name': CreateVM_Task, 'duration_secs': 3.003481} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.031319] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1414.032172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-370909', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'name': 'volume-76167f57-102e-45d9-8256-5434bbce481e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1914aaa-1f3d-48b7-a6d2-ceea16dc786a', 'attached_at': '', 'detached_at': '', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'serial': '76167f57-102e-45d9-8256-5434bbce481e'}, 'disk_bus': None, 'delete_on_termination': True, 'attachment_id': '09532d56-5a40-445f-8d7d-dfc8d55a9da7', 'volume_type': None}], 'swap': None} {{(pid=62816) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1414.032391] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Root volume attach. Driver type: vmdk {{(pid=62816) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1414.033310] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f69163d-d399-4912-b1e0-1a5d27685a46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.043580] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f77dea9-5b3b-42b7-8f14-b9de35e155a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.054083] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba690370-25f5-41fb-9294-14466d3cc81a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.063742] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-50b76290-da38-4730-8964-242885da8e4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.074045] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1414.074045] env[62816]: value = "task-1788044" [ 1414.074045] env[62816]: _type = "Task" [ 1414.074045] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.082811] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788044, 'name': RelocateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.174718] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.174979] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1414.179029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.453s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.179262] env[62816]: DEBUG nova.objects.instance [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1414.187387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.212307] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1414.218487] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1414.218487] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1414.218487] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1414.218487] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1414.218487] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1414.218487] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1414.218926] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1414.218926] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1414.218926] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1414.218926] env[62816]: DEBUG nova.virt.hardware [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1414.218926] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f44d9c-e88f-402d-9081-e69c12a062c9 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.235776] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66a67c1-07e4-45bc-9b69-66113da92f3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.258907] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1414.267184] env[62816]: DEBUG oslo.service.loopingcall [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1414.268182] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1414.268850] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9338f1c-4812-410c-b828-4910af01648f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.293677] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1414.293677] env[62816]: value = "task-1788045" [ 1414.293677] env[62816]: _type = "Task" [ 1414.293677] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.304448] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788045, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.585742] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788044, 'name': RelocateVM_Task, 'duration_secs': 0.450684} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.586119] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1414.586378] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-370909', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'name': 'volume-76167f57-102e-45d9-8256-5434bbce481e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1914aaa-1f3d-48b7-a6d2-ceea16dc786a', 'attached_at': '', 'detached_at': '', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'serial': '76167f57-102e-45d9-8256-5434bbce481e'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1414.587441] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020ac070-6300-49c1-a034-19800c83e110 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.614918] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770a01bc-231b-4fae-a5fd-ca50f7ffd0d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.644617] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] volume-76167f57-102e-45d9-8256-5434bbce481e/volume-76167f57-102e-45d9-8256-5434bbce481e.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1414.644617] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b30fc90-6be6-4d65-aabd-b532d559033e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.666099] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1414.666099] env[62816]: value = "task-1788046" [ 1414.666099] env[62816]: _type = "Task" [ 1414.666099] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.675636] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788046, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.689028] env[62816]: DEBUG nova.compute.utils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1414.690516] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1414.690700] env[62816]: DEBUG nova.network.neutron [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1414.723385] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17be9287-c01e-48fe-8eba-623cbbd87c02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.747445] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b07d8fd-0903-4fc6-8536-a0409ff7de36 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.755718] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1414.792033] env[62816]: DEBUG nova.policy [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '932a4f8d29fb472fb09983fd4c84288b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2f09a23020874a6798ef4d132f6ec845', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1414.804876] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788045, 'name': CreateVM_Task, 'duration_secs': 0.34712} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.804983] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1414.806246] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.806246] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1414.806246] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1414.806516] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9471759e-102c-4d1a-8ab4-4cf205bc38d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.811605] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1414.811605] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5220c015-3769-52e6-ebac-cabb5f09cdd2" [ 1414.811605] env[62816]: _type = "Task" [ 1414.811605] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.822893] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5220c015-3769-52e6-ebac-cabb5f09cdd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.180718] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788046, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.195358] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1415.201602] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3046c38-8540-42c3-b71b-24064d08b4af tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.202842] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.471s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.204290] env[62816]: INFO nova.compute.claims [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.264942] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1415.264942] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c02cfab-950a-479e-b0dd-b1b95a8eaf42 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.276390] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1415.276390] env[62816]: value = "task-1788047" [ 1415.276390] env[62816]: _type = "Task" [ 1415.276390] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.288869] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788047, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.296938] env[62816]: DEBUG nova.network.neutron [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Successfully created port: ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1415.330145] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5220c015-3769-52e6-ebac-cabb5f09cdd2, 'name': SearchDatastore_Task, 'duration_secs': 0.010102} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.330845] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.331418] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1415.331418] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1415.332149] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.332480] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1415.332787] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a5ae1bd0-59a9-44cb-828b-75c384a80d0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.343599] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1415.343791] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1415.344570] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ba7abb-f05d-4dae-ab5f-0ca1a1568d61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.352824] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1415.352824] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52453016-ffd8-7cbe-8a13-33a4f1f4fc4e" [ 1415.352824] env[62816]: _type = "Task" [ 1415.352824] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.360423] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52453016-ffd8-7cbe-8a13-33a4f1f4fc4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.679099] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788046, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.786421] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788047, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.868194] env[62816]: DEBUG nova.network.neutron [-] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1415.869816] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52453016-ffd8-7cbe-8a13-33a4f1f4fc4e, 'name': SearchDatastore_Task, 'duration_secs': 0.01028} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.870850] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92855393-f899-4308-bcd4-f645a41d30b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.880107] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1415.880107] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e858e9-62ce-69ae-df87-ce3650a80f04" [ 1415.880107] env[62816]: _type = "Task" [ 1415.880107] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.892125] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e858e9-62ce-69ae-df87-ce3650a80f04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.178662] env[62816]: DEBUG nova.network.neutron [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Successfully updated port: ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1416.194653] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "b409568f-6e04-4218-8a7b-1bbf785115c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.195031] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.195117] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788046, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.210135] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1416.248432] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1416.248819] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1416.249079] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1416.249357] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1416.249594] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1416.249840] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1416.250159] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1416.250511] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1416.250779] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1416.251070] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1416.252651] env[62816]: DEBUG nova.virt.hardware [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1416.255681] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b77593-20c7-47e2-a424-01b848ad4752 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.268095] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c5da02-fa21-4694-aa66-cdbe5102d63a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.301688] env[62816]: DEBUG oslo_vmware.api [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788047, 'name': PowerOnVM_Task, 'duration_secs': 0.918785} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.302150] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1416.302455] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eab879f7-b3f8-40ec-86bf-b74175697571 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance '3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1416.367133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "d16a99df-f092-4d56-9730-852883bbdb70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.367497] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.371859] env[62816]: INFO nova.compute.manager [-] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Took 2.46 seconds to deallocate network for instance. [ 1416.399977] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e858e9-62ce-69ae-df87-ce3650a80f04, 'name': SearchDatastore_Task, 'duration_secs': 0.010503} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.407444] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.407734] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1416.408888] env[62816]: DEBUG nova.compute.manager [req-800bc830-68a1-4773-8f80-9fc93a6f027d req-4be6f9b2-ae17-49bf-9886-3a89a0173615 service nova] [instance: 927badc2-decf-49af-b2c0-d95b471272c9] Received event network-vif-deleted-0797e610-fb6d-45a5-b6f3-5da9fd5eeca8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.409963] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-26528776-cb9b-435b-b5a5-3ffb5ee76fa2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.417830] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1416.417830] env[62816]: value = "task-1788048" [ 1416.417830] env[62816]: _type = "Task" [ 1416.417830] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.437728] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788048, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.683682] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "refresh_cache-52670f9e-0cb7-4464-be9c-7b0d8346f60f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.683682] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquired lock "refresh_cache-52670f9e-0cb7-4464-be9c-7b0d8346f60f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.683682] env[62816]: DEBUG nova.network.neutron [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.685262] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788046, 'name': ReconfigVM_Task, 'duration_secs': 1.530346} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1416.685262] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Reconfigured VM instance instance-00000010 to attach disk [datastore1] volume-76167f57-102e-45d9-8256-5434bbce481e/volume-76167f57-102e-45d9-8256-5434bbce481e.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1416.693580] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0590cb2-d9f6-48ca-82c6-db7110ef4084 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.711577] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1416.711577] env[62816]: value = "task-1788049" [ 1416.711577] env[62816]: _type = "Task" [ 1416.711577] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1416.725065] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788049, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.863017] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d5a763-9b2f-408a-9613-05cb1d37fe3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.898532] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cfbdf35-b78d-41c8-a4f5-c44e37c24e25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.917053] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.918544] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ad8ab6-fd39-480e-9be9-ea6dca534275 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.937546] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd2a7639-71c6-452f-9d9c-4f994740faa2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1416.942644] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788048, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.956271] env[62816]: DEBUG nova.compute.provider_tree [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.222833] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788049, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.250421] env[62816]: DEBUG nova.network.neutron [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1417.351253] env[62816]: DEBUG nova.network.neutron [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Successfully updated port: ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1417.435015] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788048, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.461038] env[62816]: DEBUG nova.scheduler.client.report [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1417.528466] env[62816]: DEBUG nova.network.neutron [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Updating instance_info_cache with network_info: [{"id": "ac77bb61-2646-4f53-9264-00c3c22c9859", "address": "fa:16:3e:e3:f4:31", "network": {"id": "951b4a86-6826-4dae-b6de-fd00b3d3c179", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-643599792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6d8f496c30b49198350d4cf28b51564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac77bb61-26", "ovs_interfaceid": "ac77bb61-2646-4f53-9264-00c3c22c9859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.730020] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788049, 'name': ReconfigVM_Task, 'duration_secs': 0.604077} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.730020] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-370909', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'name': 'volume-76167f57-102e-45d9-8256-5434bbce481e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1914aaa-1f3d-48b7-a6d2-ceea16dc786a', 'attached_at': '', 'detached_at': '', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'serial': '76167f57-102e-45d9-8256-5434bbce481e'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1417.730020] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9722f4b3-9240-49ef-931d-aba603493a37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.737172] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1417.737172] env[62816]: value = "task-1788050" [ 1417.737172] env[62816]: _type = "Task" [ 1417.737172] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.751032] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788050, 'name': Rename_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.799905] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.799905] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.802515] env[62816]: DEBUG nova.compute.manager [req-346d781a-e448-48be-96c0-b6890e40d9b8 req-37db13c4-8e84-4aed-8bb8-62adfcabd3e2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Received event network-vif-plugged-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1417.802607] env[62816]: DEBUG oslo_concurrency.lockutils [req-346d781a-e448-48be-96c0-b6890e40d9b8 req-37db13c4-8e84-4aed-8bb8-62adfcabd3e2 service nova] Acquiring lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1417.802884] env[62816]: DEBUG oslo_concurrency.lockutils [req-346d781a-e448-48be-96c0-b6890e40d9b8 req-37db13c4-8e84-4aed-8bb8-62adfcabd3e2 service nova] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.803146] env[62816]: DEBUG oslo_concurrency.lockutils [req-346d781a-e448-48be-96c0-b6890e40d9b8 req-37db13c4-8e84-4aed-8bb8-62adfcabd3e2 service nova] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.803361] env[62816]: DEBUG nova.compute.manager [req-346d781a-e448-48be-96c0-b6890e40d9b8 req-37db13c4-8e84-4aed-8bb8-62adfcabd3e2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] No waiting events found dispatching network-vif-plugged-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1417.803515] env[62816]: WARNING nova.compute.manager [req-346d781a-e448-48be-96c0-b6890e40d9b8 req-37db13c4-8e84-4aed-8bb8-62adfcabd3e2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Received unexpected event network-vif-plugged-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 for instance with vm_state building and task_state spawning. 
[ 1417.855667] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.855967] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.856029] env[62816]: DEBUG nova.network.neutron [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1417.934966] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788048, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.084437} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.935542] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1417.935883] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1417.936235] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-819cb2c8-21d6-4a0b-aa08-d2cf657bc822 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.950024] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1417.950024] env[62816]: value = "task-1788051" [ 1417.950024] env[62816]: _type = "Task" [ 1417.950024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.960059] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788051, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.965972] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1417.968642] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1417.972090] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.316s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1417.972090] env[62816]: DEBUG nova.objects.instance [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1418.033208] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Releasing lock "refresh_cache-52670f9e-0cb7-4464-be9c-7b0d8346f60f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.033882] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Instance network_info: |[{"id": "ac77bb61-2646-4f53-9264-00c3c22c9859", "address": "fa:16:3e:e3:f4:31", "network": {"id": "951b4a86-6826-4dae-b6de-fd00b3d3c179", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-643599792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6d8f496c30b49198350d4cf28b51564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac77bb61-26", "ovs_interfaceid": "ac77bb61-2646-4f53-9264-00c3c22c9859", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1418.034343] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:f4:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e839c46-1ae9-43b7-9518-8f18f48100dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ac77bb61-2646-4f53-9264-00c3c22c9859', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1418.046645] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Creating folder: Project (f6d8f496c30b49198350d4cf28b51564). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1418.048552] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8563f23-f244-4e7f-8b46-5beca0f861b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.060954] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Created folder: Project (f6d8f496c30b49198350d4cf28b51564) in parent group-v370905. [ 1418.061231] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Creating folder: Instances. Parent ref: group-v370966. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1418.061523] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6a27711-455f-471e-b281-a9202ec3ad96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.072063] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Created folder: Instances in parent group-v370966. [ 1418.072662] env[62816]: DEBUG oslo.service.loopingcall [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1418.072662] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1418.072776] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b7b2881-9c5c-49ab-a54d-3a6bbe53ce8f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.094713] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1418.094713] env[62816]: value = "task-1788054" [ 1418.094713] env[62816]: _type = "Task" [ 1418.094713] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.101176] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788054, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.249420] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788050, 'name': Rename_Task, 'duration_secs': 0.302387} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.249772] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1418.250079] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6a0932a3-3ee7-490f-92c5-f81ab943585e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.257879] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1418.257879] env[62816]: value = "task-1788055" [ 1418.257879] env[62816]: _type = "Task" [ 1418.257879] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.267762] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788055, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.285742] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.285981] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.406316] env[62816]: DEBUG nova.network.neutron [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1418.460889] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074216} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.464074] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1418.464074] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0903c038-74c4-43f4-b6e9-4d445e4518dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.490203] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1418.492161] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d5f88bd-ade4-4aa2-a3dd-9c7b8c9dda1f tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.521s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.494292] env[62816]: DEBUG nova.compute.utils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 
tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1418.495549] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-452fbc68-e4d4-4480-b09e-3f9d32d4a267 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.514381] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 20.042s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.514771] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.515107] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1418.515592] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.504s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.515971] env[62816]: DEBUG nova.objects.instance [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lazy-loading 'resources' on Instance uuid ce527ce8-07b6-47a6-bab9-7934a3dda9b3 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1418.518131] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1418.523894] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1418.524286] env[62816]: DEBUG nova.network.neutron [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1418.527076] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8421f38-4a12-43e6-9402-3a0f949cc207 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.542294] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42774f40-d5c0-4818-aafa-986ded912527 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.548127] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1418.548127] env[62816]: value = "task-1788056" [ 1418.548127] env[62816]: _type = "Task" [ 1418.548127] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.566241] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abb084d6-3072-4ef2-94a8-758f0908d636 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.573056] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788056, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.580746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a208758f-9c09-4023-9e64-f7bc9c5f8621 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.611965] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180941MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1418.612141] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.616140] env[62816]: DEBUG nova.policy [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40270b970a5941bbbc312901b6678117', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c51f82f2e9f042ce8c0ee7144c1d58ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1418.625899] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788054, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.663099] env[62816]: DEBUG nova.compute.manager [None req-dace92c3-fcf0-40ff-b5c3-51aea256c171 tempest-ServerDiagnosticsV248Test-1306065734 tempest-ServerDiagnosticsV248Test-1306065734-project-admin] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1418.664719] env[62816]: DEBUG nova.network.neutron [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [{"id": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "address": "fa:16:3e:17:5c:19", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffa08ef1-95", "ovs_interfaceid": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.667175] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c3c5c5-d06b-4a7d-9cfa-95cd1c3733cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.672085] env[62816]: DEBUG nova.compute.manager [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Received event network-vif-plugged-ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1418.672302] env[62816]: DEBUG oslo_concurrency.lockutils [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] Acquiring lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.672504] env[62816]: DEBUG oslo_concurrency.lockutils [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.675796] env[62816]: DEBUG oslo_concurrency.lockutils 
[req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.675796] env[62816]: DEBUG nova.compute.manager [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] No waiting events found dispatching network-vif-plugged-ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1418.675796] env[62816]: WARNING nova.compute.manager [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Received unexpected event network-vif-plugged-ac77bb61-2646-4f53-9264-00c3c22c9859 for instance with vm_state building and task_state spawning. [ 1418.675796] env[62816]: DEBUG nova.compute.manager [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Received event network-changed-ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1418.675796] env[62816]: DEBUG nova.compute.manager [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Refreshing instance network info cache due to event network-changed-ac77bb61-2646-4f53-9264-00c3c22c9859. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1418.676201] env[62816]: DEBUG oslo_concurrency.lockutils [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] Acquiring lock "refresh_cache-52670f9e-0cb7-4464-be9c-7b0d8346f60f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.676201] env[62816]: DEBUG oslo_concurrency.lockutils [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] Acquired lock "refresh_cache-52670f9e-0cb7-4464-be9c-7b0d8346f60f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.676201] env[62816]: DEBUG nova.network.neutron [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Refreshing network info cache for port ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1418.679181] env[62816]: INFO nova.compute.manager [None req-dace92c3-fcf0-40ff-b5c3-51aea256c171 tempest-ServerDiagnosticsV248Test-1306065734 tempest-ServerDiagnosticsV248Test-1306065734-project-admin] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Retrieving diagnostics [ 1418.680013] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c65328-e19d-4158-a16a-e680425a6ba0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.768838] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 
tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788055, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.836294] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "e003e41d-93e8-4258-b8ca-3c2420b73df0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.836541] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.003776] env[62816]: DEBUG nova.network.neutron [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Successfully created port: 0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1419.063030] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788056, 'name': ReconfigVM_Task, 'duration_secs': 0.422665} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.063030] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 66745316-2735-4c49-b1a2-f9e547211761/66745316-2735-4c49-b1a2-f9e547211761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1419.067024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d9646758-837f-43e9-b534-1fcd677b94f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.070760] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1419.070760] env[62816]: value = "task-1788057" [ 1419.070760] env[62816]: _type = "Task" [ 1419.070760] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.082328] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788057, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.124588] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788054, 'name': CreateVM_Task, 'duration_secs': 0.588032} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.124744] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.126177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.126453] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.126852] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.130398] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b0d31d2-7602-43d1-9ec0-f46bc89bdf85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.136061] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1419.136061] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b8ff48-1bb4-e920-f6d8-d1979480e116" [ 1419.136061] env[62816]: _type = "Task" [ 1419.136061] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.146494] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b8ff48-1bb4-e920-f6d8-d1979480e116, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.170572] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.170998] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance network_info: |[{"id": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "address": "fa:16:3e:17:5c:19", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffa08ef1-95", "ovs_interfaceid": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1419.171494] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:5c:19', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4fb94adb-cc41-4c16-9830-a3205dbd2bf5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ffa08ef1-95a6-4f8b-b323-b76c08d6e671', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1419.180198] env[62816]: DEBUG oslo.service.loopingcall [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1419.185146] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1419.186484] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1ee489f-cae7-41bd-828b-becd92ba5df4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.209447] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1419.209447] env[62816]: value = "task-1788058" [ 1419.209447] env[62816]: _type = "Task" [ 1419.209447] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.221078] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788058, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.273912] env[62816]: DEBUG oslo_vmware.api [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788055, 'name': PowerOnVM_Task, 'duration_secs': 0.670211} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.274325] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1419.274434] env[62816]: INFO nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Took 9.19 seconds to spawn the instance on the hypervisor. 
[ 1419.274611] env[62816]: DEBUG nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1419.275688] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a3d501-27be-412b-b64b-0f0d92d25b19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.337500] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.337779] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.337972] env[62816]: DEBUG nova.compute.manager [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Going to confirm migration 1 {{(pid=62816) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1419.518058] env[62816]: DEBUG nova.network.neutron [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Updated VIF entry in instance network info cache for port ac77bb61-2646-4f53-9264-00c3c22c9859. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1419.518433] env[62816]: DEBUG nova.network.neutron [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Updating instance_info_cache with network_info: [{"id": "ac77bb61-2646-4f53-9264-00c3c22c9859", "address": "fa:16:3e:e3:f4:31", "network": {"id": "951b4a86-6826-4dae-b6de-fd00b3d3c179", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-643599792-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f6d8f496c30b49198350d4cf28b51564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapac77bb61-26", "ovs_interfaceid": "ac77bb61-2646-4f53-9264-00c3c22c9859", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1419.544097] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1419.581022] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1419.581022] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1419.581022] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1419.581372] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1419.581372] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1419.581372] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1419.581372] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1419.581372] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1419.581551] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1419.581551] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1419.581551] env[62816]: DEBUG nova.virt.hardware [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1419.581551] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f60294-d0b7-471d-bd87-1ef1b82a4097 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.590574] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788057, 'name': Rename_Task, 'duration_secs': 0.248523} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.592925] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1419.593255] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9120ca9f-d47f-4f20-9d84-2ac313f5866d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.596493] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e10a76f-35d0-4da2-bc5d-3247790bcd01 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.617496] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Waiting for the task: (returnval){ [ 1419.617496] env[62816]: value = "task-1788059" [ 1419.617496] env[62816]: _type = "Task" [ 1419.617496] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.627198] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788059, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.646934] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b8ff48-1bb4-e920-f6d8-d1979480e116, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.650133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.650389] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.650629] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.651038] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.651038] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1419.652625] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fabce1fb-903f-4049-a555-61f71a705820 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.665235] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1419.666621] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 
tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1419.670104] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7898259b-ae1a-4816-a6f4-0fc3d1ef7c28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.677151] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1419.677151] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52956aca-1d2f-28f2-fdb6-9f3a85532b71" [ 1419.677151] env[62816]: _type = "Task" [ 1419.677151] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.685734] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52956aca-1d2f-28f2-fdb6-9f3a85532b71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.719631] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788058, 'name': CreateVM_Task, 'duration_secs': 0.408395} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.720807] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1419.721564] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c835db-b814-4a84-b9ee-c8b4304ea301 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.724565] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.724731] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.725100] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1419.725666] env[62816]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-684c9cf2-f121-40e7-8e26-0c1327e116bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.734587] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eabba090-3b2e-48d4-a35d-eadd0a441c99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.737796] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1419.737796] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5201d962-6000-c8d5-5be6-a3dbe360cd8f" [ 1419.737796] env[62816]: _type = "Task" [ 1419.737796] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.767623] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b7ac66-e16c-4a8d-928d-3bab21219a7b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.773864] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5201d962-6000-c8d5-5be6-a3dbe360cd8f, 'name': SearchDatastore_Task, 'duration_secs': 0.009295} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.774553] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.774788] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1419.774996] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.778754] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cd1d37-2552-4bba-835b-74fc546adcbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.800499] env[62816]: DEBUG nova.compute.provider_tree [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 
tempest-ServerShowV257Test-2011464327-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1419.801953] env[62816]: INFO nova.compute.manager [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Took 33.08 seconds to build instance. [ 1419.913325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1419.913521] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1419.913700] env[62816]: DEBUG nova.network.neutron [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1419.913885] env[62816]: DEBUG nova.objects.instance [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lazy-loading 'info_cache' on Instance uuid 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1420.025626] env[62816]: DEBUG oslo_concurrency.lockutils [req-03252cd8-e1d2-40b4-b967-58d661eb47de req-b986a06d-5cf8-4a04-a45f-53fc2a58bb02 service nova] Releasing lock "refresh_cache-52670f9e-0cb7-4464-be9c-7b0d8346f60f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.128778] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788059, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.143133] env[62816]: DEBUG nova.compute.manager [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Received event network-changed-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1420.143289] env[62816]: DEBUG nova.compute.manager [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Refreshing instance network info cache due to event network-changed-ffa08ef1-95a6-4f8b-b323-b76c08d6e671. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1420.143528] env[62816]: DEBUG oslo_concurrency.lockutils [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] Acquiring lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.145026] env[62816]: DEBUG oslo_concurrency.lockutils [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] Acquired lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.145026] env[62816]: DEBUG nova.network.neutron [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Refreshing network info cache for port ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1420.159041] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "2bc7f973-007d-44bd-aae8-d3b62506efba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.159321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "2bc7f973-007d-44bd-aae8-d3b62506efba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.159656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "2bc7f973-007d-44bd-aae8-d3b62506efba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1420.163040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "2bc7f973-007d-44bd-aae8-d3b62506efba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.163040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "2bc7f973-007d-44bd-aae8-d3b62506efba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.167185] env[62816]: INFO nova.compute.manager [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 
tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Terminating instance [ 1420.171799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "refresh_cache-2bc7f973-007d-44bd-aae8-d3b62506efba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1420.172903] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquired lock "refresh_cache-2bc7f973-007d-44bd-aae8-d3b62506efba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.172903] env[62816]: DEBUG nova.network.neutron [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1420.193332] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52956aca-1d2f-28f2-fdb6-9f3a85532b71, 'name': SearchDatastore_Task, 'duration_secs': 0.021758} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.194613] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a92d08ab-77ea-43dc-a26a-ab86de6621eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.201264] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1420.201264] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529d4b76-acdb-e7aa-d369-7443381d099c" [ 1420.201264] env[62816]: _type = "Task" [ 1420.201264] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.213584] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529d4b76-acdb-e7aa-d369-7443381d099c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.303620] env[62816]: DEBUG nova.scheduler.client.report [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1420.307062] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef3c450a-f6ee-4212-b2dc-562f7dff000e tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.830s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.630816] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788059, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.712348] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529d4b76-acdb-e7aa-d369-7443381d099c, 'name': SearchDatastore_Task, 'duration_secs': 0.024374} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1420.712875] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1420.713284] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 52670f9e-0cb7-4464-be9c-7b0d8346f60f/52670f9e-0cb7-4464-be9c-7b0d8346f60f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1420.713733] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1420.714070] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1420.714554] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fdbf9e0-c1f4-4985-af4e-3688a7ce9410 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.719112] env[62816]: DEBUG nova.network.neutron [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1420.721558] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d68887c-a31c-41df-bbea-c6cbef4007da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.729135] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1420.729135] env[62816]: value = "task-1788060" [ 1420.729135] env[62816]: _type = "Task" [ 1420.729135] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.735336] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1420.739014] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1420.739612] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-471671a6-ab90-4b11-9bbd-edb77e89ea2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.746132] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.751563] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1420.751563] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524b9a03-ae1f-f1d9-de8a-d8539d6e4ea7" [ 1420.751563] env[62816]: _type = "Task" [ 1420.751563] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1420.761234] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524b9a03-ae1f-f1d9-de8a-d8539d6e4ea7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.808965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.293s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.811868] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.267s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1420.813459] env[62816]: INFO nova.compute.claims [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1420.816274] env[62816]: DEBUG nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1420.854240] env[62816]: INFO nova.scheduler.client.report [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Deleted allocations for instance ce527ce8-07b6-47a6-bab9-7934a3dda9b3 [ 1421.088270] env[62816]: DEBUG nova.network.neutron [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.134113] env[62816]: DEBUG oslo_vmware.api [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Task: {'id': task-1788059, 'name': PowerOnVM_Task, 'duration_secs': 1.085814} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.134629] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1421.134996] env[62816]: DEBUG nova.compute.manager [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1421.136129] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9009d58a-395f-4ff3-b46b-86739b1d8187 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.211219] env[62816]: DEBUG nova.network.neutron [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updated VIF entry in instance network info cache for port ffa08ef1-95a6-4f8b-b323-b76c08d6e671. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1421.211584] env[62816]: DEBUG nova.network.neutron [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [{"id": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "address": "fa:16:3e:17:5c:19", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffa08ef1-95", "ovs_interfaceid": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.242553] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788060, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.272104] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524b9a03-ae1f-f1d9-de8a-d8539d6e4ea7, 'name': SearchDatastore_Task, 'duration_secs': 0.010983} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.273343] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-779d70a0-35a5-4d9a-88bd-a2221a95e975 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.280123] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1421.280123] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525abc65-a2e6-165f-990d-b0a96e542688" [ 1421.280123] env[62816]: _type = "Task" [ 1421.280123] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.290471] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525abc65-a2e6-165f-990d-b0a96e542688, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.306015] env[62816]: DEBUG nova.network.neutron [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Successfully updated port: 0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1421.346652] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.365488] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2fc0cf0-97a6-483a-b692-ec82f5cb3715 tempest-ServerShowV257Test-2011464327 tempest-ServerShowV257Test-2011464327-project-member] Lock "ce527ce8-07b6-47a6-bab9-7934a3dda9b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.630s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.384812] env[62816]: DEBUG nova.network.neutron [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", "network": {"id": 
"6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1421.594656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Releasing lock "refresh_cache-2bc7f973-007d-44bd-aae8-d3b62506efba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.595118] env[62816]: DEBUG nova.compute.manager [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1421.595313] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1421.596205] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab56209a-fd68-4e7d-8b63-5ed744b2a24f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.604184] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1421.604454] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-faaba009-d058-4a7f-b428-b61459e95a00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.611205] env[62816]: DEBUG oslo_vmware.api [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1421.611205] env[62816]: value = "task-1788061" [ 1421.611205] env[62816]: _type = "Task" [ 1421.611205] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.619178] env[62816]: DEBUG oslo_vmware.api [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.660695] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.715113] env[62816]: DEBUG oslo_concurrency.lockutils [req-16f4bb48-4ec8-46c4-96a4-996dc0254b63 req-07e954e1-bcbc-48f5-8289-3c81ca49bdc2 service nova] Releasing lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.742242] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.64335} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.742682] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 52670f9e-0cb7-4464-be9c-7b0d8346f60f/52670f9e-0cb7-4464-be9c-7b0d8346f60f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.742873] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.743199] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54817261-900e-446d-8108-3e036765bf44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.750556] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1421.750556] env[62816]: value = "task-1788062" [ 1421.750556] env[62816]: _type = "Task" [ 1421.750556] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.759329] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788062, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.791122] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525abc65-a2e6-165f-990d-b0a96e542688, 'name': SearchDatastore_Task, 'duration_secs': 0.075236} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.791414] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.791677] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4a6ac464-a5e0-4ed6-909d-f1730be14380/4a6ac464-a5e0-4ed6-909d-f1730be14380.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1421.792057] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4721ab0f-6dd2-45c5-94bd-f6b91d956207 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.799836] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1421.799836] env[62816]: value = "task-1788063" [ 1421.799836] env[62816]: _type = "Task" [ 1421.799836] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.809689] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "refresh_cache-914b187f-b05f-49d4-bf61-d536ef61934d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1421.809821] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquired lock "refresh_cache-914b187f-b05f-49d4-bf61-d536ef61934d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1421.809991] env[62816]: DEBUG nova.network.neutron [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1421.811278] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788063, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.889445] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1421.889856] env[62816]: DEBUG nova.objects.instance [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lazy-loading 'migration_context' on Instance uuid 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1422.122884] env[62816]: DEBUG oslo_vmware.api [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788061, 'name': PowerOffVM_Task, 'duration_secs': 0.20224} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.124665] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1422.124924] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1422.125272] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cf25e8a-cbb2-4a4f-9287-d667eac0d1b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.157748] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1422.158771] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1422.158875] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Deleting the datastore file [datastore1] 2bc7f973-007d-44bd-aae8-d3b62506efba {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1422.160068] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d05b553-8f0e-489d-a271-59340cb4cb02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.167268] env[62816]: DEBUG oslo_vmware.api [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for the task: (returnval){ [ 1422.167268] env[62816]: value = "task-1788065" [ 1422.167268] env[62816]: _type = "Task" [ 1422.167268] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.183504] env[62816]: DEBUG oslo_vmware.api [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788065, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.236248] env[62816]: DEBUG nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Received event network-vif-plugged-0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1422.236498] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Acquiring lock "914b187f-b05f-49d4-bf61-d536ef61934d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.236774] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Lock "914b187f-b05f-49d4-bf61-d536ef61934d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.237032] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Lock "914b187f-b05f-49d4-bf61-d536ef61934d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.237203] env[62816]: DEBUG nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] No waiting events found dispatching network-vif-plugged-0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1422.237365] env[62816]: WARNING nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Received unexpected event network-vif-plugged-0122f844-9db0-479d-adad-20dd495d1aa0 for instance with vm_state building and task_state spawning. [ 1422.237522] env[62816]: DEBUG nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Received event network-changed-6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1422.237673] env[62816]: DEBUG nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Refreshing instance network info cache due to event network-changed-6b060db8-dee6-465b-8fb0-980f49a5e433. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1422.237850] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Acquiring lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.237986] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Acquired lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.240258] env[62816]: DEBUG nova.network.neutron [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Refreshing network info cache for port 6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1422.263777] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788062, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07249} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.266862] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1422.268028] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610d32c0-7355-4121-ae2b-c663d7f17e99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.299345] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 52670f9e-0cb7-4464-be9c-7b0d8346f60f/52670f9e-0cb7-4464-be9c-7b0d8346f60f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1422.302702] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ceb53d5-42f6-490c-8963-3d416a4a6357 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.336649] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788063, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.338744] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1422.338744] env[62816]: value = "task-1788066" [ 1422.338744] env[62816]: _type = "Task" [ 1422.338744] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.352050] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.367371] env[62816]: DEBUG nova.network.neutron [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1422.392399] env[62816]: DEBUG nova.objects.base [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Object Instance<3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd> lazy-loaded attributes: info_cache,migration_context {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1422.393526] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3697605a-8ce5-4bfb-b19b-a8ba20231b6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.415611] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b56808d-ca87-4d40-854b-628bac458065 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.422022] env[62816]: DEBUG oslo_vmware.api [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1422.422022] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d7fc57-ad01-edd5-a81a-5e9d4a54fa7e" [ 1422.422022] env[62816]: _type = "Task" [ 1422.422022] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.433704] env[62816]: DEBUG oslo_vmware.api [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d7fc57-ad01-edd5-a81a-5e9d4a54fa7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.484657] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316fff8a-c791-47b6-9f94-e83066376611 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.492494] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a3050e-1cf4-429f-84f0-9f1f38f013a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.523711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "66745316-2735-4c49-b1a2-f9e547211761" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.523977] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "66745316-2735-4c49-b1a2-f9e547211761" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.524203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "66745316-2735-4c49-b1a2-f9e547211761-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.524387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "66745316-2735-4c49-b1a2-f9e547211761-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.524552] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "66745316-2735-4c49-b1a2-f9e547211761-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.526700] env[62816]: INFO nova.compute.manager [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Terminating instance [ 1422.528332] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "refresh_cache-66745316-2735-4c49-b1a2-f9e547211761" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.528490] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquired lock "refresh_cache-66745316-2735-4c49-b1a2-f9e547211761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.528660] env[62816]: DEBUG nova.network.neutron [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1422.531181] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5f0a7a-5721-41e8-aec1-f44e920f0b4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.539790] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11da4b4f-ab68-4e13-a707-45db1f8910be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.554844] env[62816]: DEBUG nova.compute.provider_tree [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1422.662150] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd02af-631d-aad2-7cee-df1adeb022cd/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1422.666531] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d344af-9c86-4047-9b77-4ed2f9a6e101 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.682509] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd02af-631d-aad2-7cee-df1adeb022cd/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1422.682776] env[62816]: ERROR oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd02af-631d-aad2-7cee-df1adeb022cd/disk-0.vmdk due to incomplete transfer. 
[ 1422.683533] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fa0eb448-bb36-4b2e-a095-c1ed8f84b2ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.691582] env[62816]: DEBUG oslo_vmware.api [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Task: {'id': task-1788065, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378886} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.692385] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1422.692681] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1422.692937] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1422.693197] env[62816]: INFO nova.compute.manager [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1422.693533] env[62816]: DEBUG oslo.service.loopingcall [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1422.693794] env[62816]: DEBUG nova.compute.manager [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1422.693922] env[62816]: DEBUG nova.network.neutron [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1422.698254] env[62816]: DEBUG oslo_vmware.rw_handles [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52bd02af-631d-aad2-7cee-df1adeb022cd/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1422.698517] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Uploaded image e97c3ce6-ef84-42c7-be27-2e8035941732 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1422.701233] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1422.701591] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-50cfca8c-a9a9-4c2d-ab3d-b5bf41881880 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.715024] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1422.715024] env[62816]: value = "task-1788067" [ 1422.715024] env[62816]: _type = "Task" [ 1422.715024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.722888] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788067, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.730038] env[62816]: DEBUG nova.network.neutron [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1422.832158] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788063, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662229} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.833191] env[62816]: DEBUG nova.network.neutron [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Updating instance_info_cache with network_info: [{"id": "0122f844-9db0-479d-adad-20dd495d1aa0", "address": "fa:16:3e:fe:ae:06", "network": {"id": "dca407aa-b4bb-431c-8e7b-94b9ca635c23", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-911777667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c51f82f2e9f042ce8c0ee7144c1d58ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0122f844-9d", "ovs_interfaceid": "0122f844-9db0-479d-adad-20dd495d1aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1422.834917] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4a6ac464-a5e0-4ed6-909d-f1730be14380/4a6ac464-a5e0-4ed6-909d-f1730be14380.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1422.835202] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1422.837791] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9a2006f-09ef-424f-a2b9-df047cfdd5a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.848636] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1422.848636] env[62816]: value = "task-1788068" [ 1422.848636] env[62816]: _type = "Task" [ 1422.848636] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.857667] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788066, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.863243] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788068, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.934321] env[62816]: DEBUG oslo_vmware.api [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d7fc57-ad01-edd5-a81a-5e9d4a54fa7e, 'name': SearchDatastore_Task, 'duration_secs': 0.026385} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.934806] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.062128] env[62816]: DEBUG nova.network.neutron [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1423.089245] env[62816]: ERROR nova.scheduler.client.report [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [req-0b59e9d0-bcfe-4e37-aba5-6dd1063bd077] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0b59e9d0-bcfe-4e37-aba5-6dd1063bd077"}]} [ 1423.111192] env[62816]: DEBUG nova.scheduler.client.report [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1423.136058] env[62816]: DEBUG nova.scheduler.client.report [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1423.139699] env[62816]: DEBUG nova.compute.provider_tree [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1423.149241] env[62816]: DEBUG nova.scheduler.client.report [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1423.172998] env[62816]: DEBUG nova.scheduler.client.report [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1423.176118] env[62816]: DEBUG nova.network.neutron [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.222216] env[62816]: DEBUG oslo_vmware.api [None 
req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788067, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.225740] env[62816]: DEBUG nova.network.neutron [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updated VIF entry in instance network info cache for port 6b060db8-dee6-465b-8fb0-980f49a5e433. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1423.226074] env[62816]: DEBUG nova.network.neutron [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updating instance_info_cache with network_info: [{"id": "6b060db8-dee6-465b-8fb0-980f49a5e433", "address": "fa:16:3e:25:17:f5", "network": {"id": "dc60bf1f-0f94-4aba-8b9f-40ef9e49b444", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1385986515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "48b0cb4d3e7844aea904384f8677bbc0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27681cba-790d-451e-9d12-d179871f375a", "external-id": "cl2-zone-147", "segmentation_id": 147, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b060db8-de", "ovs_interfaceid": "6b060db8-dee6-465b-8fb0-980f49a5e433", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.234746] env[62816]: DEBUG nova.network.neutron [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.336136] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Releasing lock "refresh_cache-914b187f-b05f-49d4-bf61-d536ef61934d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.336832] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Instance network_info: |[{"id": "0122f844-9db0-479d-adad-20dd495d1aa0", "address": "fa:16:3e:fe:ae:06", "network": {"id": "dca407aa-b4bb-431c-8e7b-94b9ca635c23", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-911777667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c51f82f2e9f042ce8c0ee7144c1d58ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0122f844-9d", "ovs_interfaceid": "0122f844-9db0-479d-adad-20dd495d1aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1423.337939] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:ae:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6eb7e3e9-5cc2-40f1-a6eb-f70f06531667', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0122f844-9db0-479d-adad-20dd495d1aa0', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1423.345936] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Creating folder: Project (c51f82f2e9f042ce8c0ee7144c1d58ef). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1423.349373] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-492a3cc8-91b5-4ae5-9b1d-c8a7f88a81d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.363138] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788066, 'name': ReconfigVM_Task, 'duration_secs': 0.596018} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.369591] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 52670f9e-0cb7-4464-be9c-7b0d8346f60f/52670f9e-0cb7-4464-be9c-7b0d8346f60f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1423.370672] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788068, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07478} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.372781] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddbf6cf5-502e-4179-9430-cd3ccab40537 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.374391] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1423.374681] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Created folder: Project (c51f82f2e9f042ce8c0ee7144c1d58ef) in parent group-v370905. [ 1423.374884] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Creating folder: Instances. Parent ref: group-v370970. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1423.376061] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350146e6-8e9e-417f-af7c-39815eb61e5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.378115] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0f5401f-a881-4d12-8435-8506fa8d102b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.395662] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1423.395662] env[62816]: value = "task-1788070" [ 1423.395662] env[62816]: _type = "Task" [ 1423.395662] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.405669] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 4a6ac464-a5e0-4ed6-909d-f1730be14380/4a6ac464-a5e0-4ed6-909d-f1730be14380.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1423.410648] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f770010-a3c3-44fa-8e8c-eb37bb0945a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.425928] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Created folder: Instances in parent group-v370970. 
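The records above and below show the vmwareapi driver submitting vCenter tasks (Rename_Task, ReconfigVM_Task, CreateVM_Task, Destroy_Task) and then polling them through oslo_vmware.api wait_for_task until a "completed successfully" record is emitted with a duration_secs value. The following is a minimal, self-contained sketch of that polling pattern only; it does not use the real oslo.vmware session object, and fetch_task_info, poll_interval and TaskFailed are illustrative assumptions rather than Nova or oslo.vmware names.

    # Simplified sketch of the task-polling loop reflected in the log records.
    # fetch_task_info() stands in for the vSphere property read that the real
    # library performs; it is a hypothetical callable, not a library function.
    import time


    class TaskFailed(Exception):
        """Raised when the polled task reports an error state."""


    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out.

        fetch_task_info must return a dict such as
        {'state': 'running', 'progress': 33} or {'state': 'success'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            state = info.get("state")
            if state == "success":
                return info
            if state == "error":
                raise TaskFailed(info.get("error", "task failed"))
            # Corresponds to the "progress is N%" lines logged while a task runs.
            print(f"task progress is {info.get('progress', 0)}%")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")

In the log, each task-1788xxx entry follows this shape: a "Waiting for the task" record, zero or more "progress is N%" records, then a "completed successfully" record carrying duration_secs.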
[ 1423.426235] env[62816]: DEBUG oslo.service.loopingcall [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1423.430058] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1423.430813] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2340ded7-0084-4848-bbca-e6f2651166e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.451605] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1423.451605] env[62816]: value = "task-1788072" [ 1423.451605] env[62816]: _type = "Task" [ 1423.451605] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.460073] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788070, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.460166] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1423.460166] env[62816]: value = "task-1788073" [ 1423.460166] env[62816]: _type = "Task" [ 1423.460166] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.466659] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.473913] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788073, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.681661] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Releasing lock "refresh_cache-66745316-2735-4c49-b1a2-f9e547211761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.682185] env[62816]: DEBUG nova.compute.manager [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1423.682391] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1423.683292] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4c5ec6-7151-4d50-8086-a7f45529439a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.691359] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1423.692447] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-175aec26-63c0-41ea-a503-328313085cca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.701051] env[62816]: DEBUG oslo_vmware.api [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1423.701051] env[62816]: value = "task-1788074" [ 1423.701051] env[62816]: _type = "Task" [ 1423.701051] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.709995] env[62816]: DEBUG oslo_vmware.api [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788074, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.721257] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788067, 'name': Destroy_Task, 'duration_secs': 0.873628} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.722287] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Destroyed the VM [ 1423.722287] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1423.722287] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-530fd56d-73ca-4416-9004-621fdad047e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.730149] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Releasing lock "refresh_cache-f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1423.730921] env[62816]: DEBUG nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Received event network-changed-0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1423.731154] env[62816]: DEBUG nova.compute.manager [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Refreshing instance network info cache due to event network-changed-0122f844-9db0-479d-adad-20dd495d1aa0. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1423.731391] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Acquiring lock "refresh_cache-914b187f-b05f-49d4-bf61-d536ef61934d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.731574] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Acquired lock "refresh_cache-914b187f-b05f-49d4-bf61-d536ef61934d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.731888] env[62816]: DEBUG nova.network.neutron [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Refreshing network info cache for port 0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1423.737116] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1423.737116] env[62816]: value = "task-1788075" [ 1423.737116] env[62816]: _type = "Task" [ 1423.737116] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.739457] env[62816]: INFO nova.compute.manager [-] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Took 1.05 seconds to deallocate network for instance. [ 1423.739916] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "455052cc-292a-414c-8c83-bc512c49a197" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.741809] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.742052] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "455052cc-292a-414c-8c83-bc512c49a197-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.742258] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.742580] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.744740] env[62816]: INFO nova.compute.manager [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Terminating instance [ 1423.746655] env[62816]: DEBUG nova.compute.manager [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1423.746837] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1423.754893] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335b1b0b-9934-4916-8e8b-ff27186bf1df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.764210] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788075, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.768859] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1423.769151] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47f46a0c-a286-4e5a-891e-60edfdd2c8c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.867023] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e124ea6-4613-4bf3-9088-bad323085c54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.875729] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1414a75e-2ad3-4336-8070-0676ec400ecd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.905831] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0e3f9e-4980-4a8b-8793-e978cec855ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.918595] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a79ed1a-a214-48cf-9660-9645761e1bb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.926744] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788070, 'name': Rename_Task, 'duration_secs': 0.224167} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.927782] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1423.928055] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37200ea5-dc06-48c4-a927-2031296f27f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.937221] env[62816]: DEBUG nova.compute.provider_tree [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1423.945177] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1423.945177] env[62816]: value = "task-1788077" [ 1423.945177] env[62816]: _type = "Task" [ 1423.945177] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.958104] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788077, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.971254] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788072, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.975148] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788073, 'name': CreateVM_Task, 'duration_secs': 0.385288} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.975148] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1423.975814] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1423.976635] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1423.976635] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1423.976635] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00277557-89e0-413f-8736-b30e177ccc90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.981079] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1423.981079] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521bc1cb-1290-104d-a399-0708d38861cf" [ 1423.981079] env[62816]: _type = "Task" [ 1423.981079] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1423.988891] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521bc1cb-1290-104d-a399-0708d38861cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.210313] env[62816]: DEBUG oslo_vmware.api [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788074, 'name': PowerOffVM_Task, 'duration_secs': 0.138541} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.210636] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1424.210833] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1424.211144] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcff93f6-5b89-4fa7-a4b1-35e5ebee6870 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.235307] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.235307] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.235307] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Deleting the datastore file [datastore1] 66745316-2735-4c49-b1a2-f9e547211761 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.235307] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4dc9625f-2979-48a7-bcb8-f3c0f0e630fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.247448] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788075, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.248796] env[62816]: DEBUG oslo_vmware.api [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for the task: (returnval){ [ 1424.248796] env[62816]: value = "task-1788079" [ 1424.248796] env[62816]: _type = "Task" [ 1424.248796] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.260544] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.440448] env[62816]: DEBUG nova.scheduler.client.report [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1424.461376] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788077, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.473536] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788072, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.477326] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.477326] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.477326] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleting the datastore file [datastore1] 455052cc-292a-414c-8c83-bc512c49a197 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.477326] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6cfee0c-2d13-4a76-a01e-b86f82faa45d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.482686] env[62816]: DEBUG oslo_vmware.api [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1424.482686] env[62816]: value = "task-1788080" [ 1424.482686] env[62816]: _type = "Task" [ 1424.482686] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.497657] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521bc1cb-1290-104d-a399-0708d38861cf, 'name': SearchDatastore_Task, 'duration_secs': 0.00959} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.500939] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.501587] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1424.501587] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.501587] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.501875] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1424.502305] env[62816]: DEBUG oslo_vmware.api [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.505235] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d26874d4-3beb-4083-89a1-51f52c6082ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.507327] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "946dad01-c012-457d-8bfe-6395ff0aaedf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.507471] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.508292] env[62816]: DEBUG nova.network.neutron [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Updated VIF entry in instance network info cache for port 0122f844-9db0-479d-adad-20dd495d1aa0. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1424.508621] env[62816]: DEBUG nova.network.neutron [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Updating instance_info_cache with network_info: [{"id": "0122f844-9db0-479d-adad-20dd495d1aa0", "address": "fa:16:3e:fe:ae:06", "network": {"id": "dca407aa-b4bb-431c-8e7b-94b9ca635c23", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-911777667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c51f82f2e9f042ce8c0ee7144c1d58ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6eb7e3e9-5cc2-40f1-a6eb-f70f06531667", "external-id": "nsx-vlan-transportzone-938", "segmentation_id": 938, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0122f844-9d", "ovs_interfaceid": "0122f844-9db0-479d-adad-20dd495d1aa0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.516394] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1424.516487] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1424.517223] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f71cf02-36e0-4650-99e3-eac266fb9198 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.522888] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1424.522888] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52182704-b777-39fe-fd31-d2d514795631" [ 1424.522888] env[62816]: _type = "Task" [ 1424.522888] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.532250] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52182704-b777-39fe-fd31-d2d514795631, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.750038] env[62816]: DEBUG oslo_vmware.api [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788075, 'name': RemoveSnapshot_Task, 'duration_secs': 0.639309} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.750288] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1424.750527] env[62816]: INFO nova.compute.manager [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Took 16.00 seconds to snapshot the instance on the hypervisor. [ 1424.763225] env[62816]: DEBUG oslo_vmware.api [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Task: {'id': task-1788079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131356} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.763225] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.763225] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1424.763409] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1424.763462] env[62816]: INFO nova.compute.manager [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1424.763655] env[62816]: DEBUG oslo.service.loopingcall [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.763864] env[62816]: DEBUG nova.compute.manager [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1424.763966] env[62816]: DEBUG nova.network.neutron [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1424.778673] env[62816]: DEBUG nova.network.neutron [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1424.947223] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.136s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.947966] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1424.951768] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.401s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1424.954591] env[62816]: INFO nova.compute.claims [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1424.973044] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788072, 'name': ReconfigVM_Task, 'duration_secs': 1.327537} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.977651] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 4a6ac464-a5e0-4ed6-909d-f1730be14380/4a6ac464-a5e0-4ed6-909d-f1730be14380.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1424.978428] env[62816]: DEBUG oslo_vmware.api [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788077, 'name': PowerOnVM_Task, 'duration_secs': 0.642765} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.978645] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5196e349-ad01-4ac1-b988-a9286bc04d5d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.980792] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1424.981015] env[62816]: INFO nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Took 11.54 seconds to spawn the instance on the hypervisor. 
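The "Claim successful on node ..." record above follows the provider inventory logged earlier for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa (VCPU, MEMORY_MB and DISK_GB, each with total, reserved and allocation_ratio). Below is a small sketch of how such an inventory dict is commonly turned into schedulable capacity; the (total - reserved) * allocation_ratio formula matches the usual Placement treatment of these fields, but the helper name and the hard-coded sample values are only illustrative.

    # Illustrative only: derive schedulable capacity from an inventory dict shaped
    # like the one in the "Inventory has not changed for provider ..." record.
    SAMPLE_INVENTORY = {
        "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
    }


    def schedulable_capacity(inventory):
        """Return {resource_class: capacity} using (total - reserved) * allocation_ratio."""
        return {
            rc: (fields["total"] - fields["reserved"]) * fields["allocation_ratio"]
            for rc, fields in inventory.items()
        }


    if __name__ == "__main__":
        # With the sample above: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
        print(schedulable_capacity(SAMPLE_INVENTORY))

The lockutils records surrounding the claim ("waited 23.401s", "held 4.136s") show that this computation happens while the compute node holds the "compute_resources" lock, which is why long waits on that lock delay instance builds.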
[ 1424.981199] env[62816]: DEBUG nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1424.982201] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8255a7-00e3-41af-816c-5601e35a1138 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.993958] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1424.993958] env[62816]: value = "task-1788081" [ 1424.993958] env[62816]: _type = "Task" [ 1424.993958] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.006239] env[62816]: DEBUG oslo_vmware.api [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152783} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.006943] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1425.007163] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1425.007586] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1425.007731] env[62816]: INFO nova.compute.manager [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1425.007983] env[62816]: DEBUG oslo.service.loopingcall [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1425.013446] env[62816]: DEBUG nova.compute.manager [-] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1425.013553] env[62816]: DEBUG nova.network.neutron [-] [instance: 455052cc-292a-414c-8c83-bc512c49a197] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.015495] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e98a3e0-6657-4552-818b-5bbe9569d2e8 req-0713bc35-e1aa-4d53-a433-f8f7f12dfbed service nova] Releasing lock "refresh_cache-914b187f-b05f-49d4-bf61-d536ef61934d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.016062] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788081, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.040103] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52182704-b777-39fe-fd31-d2d514795631, 'name': SearchDatastore_Task, 'duration_secs': 0.026726} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.042288] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9603e394-0803-4c84-8f59-16de4532f7e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.048952] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1425.048952] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52533958-a9ce-cb59-78dc-4039fb332c4b" [ 1425.048952] env[62816]: _type = "Task" [ 1425.048952] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.057965] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52533958-a9ce-cb59-78dc-4039fb332c4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.257480] env[62816]: DEBUG nova.compute.manager [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Instance disappeared during snapshot {{(pid=62816) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1425.277315] env[62816]: DEBUG nova.compute.manager [None req-be8fb865-7ce0-4f49-b58f-b4b9165eeb77 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image not found during clean up e97c3ce6-ef84-42c7-be27-2e8035941732 {{(pid=62816) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4501}} [ 1425.281083] env[62816]: DEBUG nova.network.neutron [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.468024] env[62816]: DEBUG nova.compute.utils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1425.477214] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1425.477214] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1425.508145] env[62816]: INFO nova.compute.manager [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Took 32.66 seconds to build instance. [ 1425.514220] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788081, 'name': Rename_Task, 'duration_secs': 0.165516} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.514978] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1425.515747] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6958bb13-e918-4419-a1e4-fad672e52570 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.524686] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1425.524686] env[62816]: value = "task-1788082" [ 1425.524686] env[62816]: _type = "Task" [ 1425.524686] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.534990] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788082, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.560236] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52533958-a9ce-cb59-78dc-4039fb332c4b, 'name': SearchDatastore_Task, 'duration_secs': 0.021004} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.562039] env[62816]: DEBUG nova.policy [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c6fc23d6e2d47938776335fbbf6b59e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e20c8f5bdd64f1d89157aa0b947431e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1425.563686] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.563995] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 914b187f-b05f-49d4-bf61-d536ef61934d/914b187f-b05f-49d4-bf61-d536ef61934d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1425.564319] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-59cc74df-b239-4213-a337-5ce84fc1d2f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.571428] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1425.571428] env[62816]: value = "task-1788083" [ 1425.571428] env[62816]: _type = "Task" [ 1425.571428] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.579301] env[62816]: DEBUG nova.compute.manager [req-3383df27-298c-427f-a271-9ca284fcf6c0 req-ff0a2108-7eb3-47e4-8802-b50e2fdc6e5f service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Received event network-vif-deleted-4d66ddae-3da1-44ea-a583-74e70147b7ac {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1425.580130] env[62816]: INFO nova.compute.manager [req-3383df27-298c-427f-a271-9ca284fcf6c0 req-ff0a2108-7eb3-47e4-8802-b50e2fdc6e5f service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Neutron deleted interface 4d66ddae-3da1-44ea-a583-74e70147b7ac; detaching it from the instance and deleting it from the info cache [ 1425.580130] env[62816]: DEBUG nova.network.neutron [req-3383df27-298c-427f-a271-9ca284fcf6c0 req-ff0a2108-7eb3-47e4-8802-b50e2fdc6e5f service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.584240] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.783777] env[62816]: INFO nova.compute.manager [-] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Took 1.02 seconds to deallocate network for instance. [ 1425.976030] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1425.988199] env[62816]: DEBUG nova.network.neutron [-] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.011050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f9bc1394-a933-4790-bf8a-738f013ff3d7 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.841s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.044963] env[62816]: DEBUG oslo_vmware.api [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788082, 'name': PowerOnVM_Task, 'duration_secs': 0.482632} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.045280] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1426.045475] env[62816]: INFO nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Took 9.84 seconds to spawn the instance on the hypervisor. [ 1426.045653] env[62816]: DEBUG nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1426.047910] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd831010-3705-4f19-8190-b82288fcb94a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.083563] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788083, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.086262] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ebabae6-de90-4ab3-acc7-36557c2451d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.097304] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928946da-6df0-4ba7-9206-66cf466d7ce6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.132237] env[62816]: DEBUG nova.compute.manager [req-3383df27-298c-427f-a271-9ca284fcf6c0 req-ff0a2108-7eb3-47e4-8802-b50e2fdc6e5f service nova] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Detach interface failed, port_id=4d66ddae-3da1-44ea-a583-74e70147b7ac, reason: Instance 455052cc-292a-414c-8c83-bc512c49a197 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1426.292333] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.336686] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.336934] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.339784] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.340058] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.003s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.340262] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.344979] env[62816]: INFO nova.compute.manager [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Terminating instance [ 1426.346498] env[62816]: DEBUG nova.compute.manager [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1426.346687] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1426.347538] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc00cbb-acf7-4071-9281-47a4f0762ceb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.355272] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1426.356178] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb3e2da3-eac4-4747-ba97-9a4c67fceb20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.368350] env[62816]: DEBUG oslo_vmware.api [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1426.368350] env[62816]: value = "task-1788084" [ 1426.368350] env[62816]: _type = "Task" [ 1426.368350] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.377971] env[62816]: DEBUG oslo_vmware.api [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788084, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.400083] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Successfully created port: 2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1426.496342] env[62816]: INFO nova.compute.manager [-] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Took 1.48 seconds to deallocate network for instance. [ 1426.515047] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1426.586930] env[62816]: INFO nova.compute.manager [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Took 33.14 seconds to build instance. 
[ 1426.598050] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.583741} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.598050] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 914b187f-b05f-49d4-bf61-d536ef61934d/914b187f-b05f-49d4-bf61-d536ef61934d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1426.598172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1426.598449] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a5231085-0a93-497f-90d3-279608c1a459 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.606409] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1426.606409] env[62816]: value = "task-1788085" [ 1426.606409] env[62816]: _type = "Task" [ 1426.606409] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.621297] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788085, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.652679] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e670f00-ab9d-4d9a-920c-7597c4eab6e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.662655] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c77dbf-d21d-45cb-b240-587e0e553828 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.693569] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8505514-1fb9-4d70-80cf-fdc708b31fd7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.700679] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf71205-b28e-4e0a-b1d6-89a99270d75a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.715502] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1426.879532] env[62816]: DEBUG oslo_vmware.api [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788084, 'name': PowerOffVM_Task, 'duration_secs': 0.468393} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.879532] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1426.879532] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1426.879532] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4ae17c5-2b20-4e4d-9037-36c002a2eae7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.952998] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Successfully created port: 54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1426.962542] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1426.962767] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1426.963573] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Deleting the datastore file [datastore1] 52670f9e-0cb7-4464-be9c-7b0d8346f60f {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1426.963573] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09c82116-e9c9-40be-ad5d-5601a8bf6bbc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.969945] env[62816]: DEBUG oslo_vmware.api [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for the task: (returnval){ [ 1426.969945] env[62816]: value = "task-1788087" [ 1426.969945] env[62816]: _type = "Task" [ 1426.969945] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.977701] env[62816]: DEBUG oslo_vmware.api [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788087, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.996635] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1427.002838] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.033564] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1427.033795] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1427.033982] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1427.034156] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1427.035597] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1427.035752] env[62816]: 
DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1427.036537] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1427.037234] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1427.037477] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1427.037627] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1427.037798] env[62816]: DEBUG nova.virt.hardware [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1427.038697] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc09e3a5-d789-4de0-9169-c8e59ccd419a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.045296] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1427.053540] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691ec306-ced7-41ab-811b-a6ccbe82f6ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.090195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-726c679c-f021-429d-9f79-a22e57654ff4 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.115s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1427.115652] env[62816]: 
DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066131} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.115841] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1427.116758] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a4b975-3336-4b41-a123-d515ad87c013 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.139388] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 914b187f-b05f-49d4-bf61-d536ef61934d/914b187f-b05f-49d4-bf61-d536ef61934d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.139921] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f610ea9a-423e-4115-bf27-da0e4e48b4e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.159764] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1427.159764] env[62816]: value = "task-1788088" [ 1427.159764] env[62816]: _type = "Task" [ 1427.159764] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.167814] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788088, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.242114] env[62816]: ERROR nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [req-bb4d0450-3906-44d2-b0b6-d0e590c3ff27] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bb4d0450-3906-44d2-b0b6-d0e590c3ff27"}]} [ 1427.264073] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1427.280641] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1427.280866] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1427.294729] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1427.315958] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Successfully created port: 27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1427.318677] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1427.480651] env[62816]: 
DEBUG oslo_vmware.api [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Task: {'id': task-1788087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181534} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.483702] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1427.484045] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1427.488102] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1427.488102] env[62816]: INFO nova.compute.manager [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1427.488102] env[62816]: DEBUG oslo.service.loopingcall [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.488102] env[62816]: DEBUG nova.compute.manager [-] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1427.488102] env[62816]: DEBUG nova.network.neutron [-] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1427.597940] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1427.670498] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788088, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.882615] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cdafdb-db97-4e75-ba33-a9d7a21835b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.892716] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13afdb7-1057-4600-bee2-0a5efe972c50 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.930233] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39bfa02-4da3-4f79-8c20-34fb4b17add5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.939113] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c881549e-e878-4cca-9c57-ad4bc47bd41a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.959178] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1428.128124] env[62816]: DEBUG nova.compute.manager [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.128377] env[62816]: DEBUG nova.compute.manager [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing instance network info cache due to event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1428.128600] env[62816]: DEBUG oslo_concurrency.lockutils [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] Acquiring lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1428.128741] env[62816]: DEBUG oslo_concurrency.lockutils [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] Acquired lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1428.129181] env[62816]: DEBUG nova.network.neutron [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1428.135990] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.170495] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788088, 'name': ReconfigVM_Task, 'duration_secs': 0.516744} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.170850] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 914b187f-b05f-49d4-bf61-d536ef61934d/914b187f-b05f-49d4-bf61-d536ef61934d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1428.171659] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a40dba6-924a-4587-9390-a5576ff15d58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.179061] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1428.179061] env[62816]: value = "task-1788089" [ 1428.179061] env[62816]: _type = "Task" [ 1428.179061] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.188533] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788089, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.212517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "48b74d52-e764-4d14-b372-fc34872205dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1428.212754] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "48b74d52-e764-4d14-b372-fc34872205dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1428.449524] env[62816]: DEBUG nova.network.neutron [-] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1428.491119] env[62816]: ERROR nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [req-6e7c6d1c-8dd2-4adc-8ea3-a9df8d013ef8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6e7c6d1c-8dd2-4adc-8ea3-a9df8d013ef8"}]} [ 1428.509350] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1428.529101] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1428.529328] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1428.544786] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1428.565676] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1428.588553] env[62816]: DEBUG nova.compute.manager [req-c9893a7f-5152-4940-9e55-71b29be120eb req-aca39c2a-5caa-4ea9-9b70-f753ec2f2882 service nova] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Received event network-vif-deleted-ac77bb61-2646-4f53-9264-00c3c22c9859 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.698850] env[62816]: DEBUG oslo_vmware.api [None 
req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788089, 'name': Rename_Task, 'duration_secs': 0.301506} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.701965] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1428.702099] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3962d28-32be-4b78-a7b8-05170f14f4c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.708429] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1428.708429] env[62816]: value = "task-1788090" [ 1428.708429] env[62816]: _type = "Task" [ 1428.708429] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.716718] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788090, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.952332] env[62816]: INFO nova.compute.manager [-] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Took 1.47 seconds to deallocate network for instance. [ 1429.148123] env[62816]: DEBUG nova.network.neutron [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updated VIF entry in instance network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1429.148123] env[62816]: DEBUG nova.network.neutron [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.188307] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a31f88-07de-424a-9ba0-55ac9c0fcf65 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.197964] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d382faea-b1e3-4953-ae35-939565a5f914 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.237158] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc509fef-6c46-45a4-abe0-c02e510a0cf6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.246488] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788090, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.250028] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80026eeb-48a6-459d-bd81-485e0d8d9272 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.263712] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1429.423659] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Successfully updated port: 2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1429.459916] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1429.653709] env[62816]: DEBUG oslo_concurrency.lockutils [req-5c4810af-7f73-465d-a71b-b0adbfb37172 req-0d7097b7-4f6a-45f3-b2d6-37f623656ba0 service nova] Releasing lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1429.743476] env[62816]: DEBUG oslo_vmware.api [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788090, 'name': PowerOnVM_Task, 'duration_secs': 0.77178} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.743760] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.743963] env[62816]: INFO nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Took 10.20 seconds to spawn the instance on the hypervisor. 
[ 1429.744155] env[62816]: DEBUG nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1429.744913] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373d554b-6bd0-47f6-a92d-0b8e03eaf54e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.798247] env[62816]: DEBUG nova.scheduler.client.report [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 51 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1429.798552] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 51 to 52 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1429.798760] env[62816]: DEBUG nova.compute.provider_tree [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1430.260530] env[62816]: INFO nova.compute.manager [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Took 35.56 seconds to build instance. 
[ 1430.304560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.353s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.305103] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1430.307615] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.165s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.310151] env[62816]: INFO nova.compute.claims [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1430.763466] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d53fe1c-e0b1-43e3-8ac3-a8e91581668a tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "914b187f-b05f-49d4-bf61-d536ef61934d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.989s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.774200] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-vif-plugged-2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1430.774200] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquiring lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.774200] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.774200] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.774200] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] No waiting events found dispatching network-vif-plugged-2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1430.774643] env[62816]: WARNING nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received unexpected event network-vif-plugged-2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce for instance with vm_state building and task_state spawning. [ 1430.775382] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-changed-2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1430.775721] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Refreshing instance network info cache due to event network-changed-2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1430.776080] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquiring lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1430.778217] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquired lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1430.778217] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Refreshing network info cache for port 2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1430.813709] env[62816]: DEBUG nova.compute.utils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1430.816950] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1430.817173] env[62816]: DEBUG nova.network.neutron [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1430.862018] env[62816]: DEBUG nova.policy [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3262feba6b94eddbe93814c58894995', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13061618f3be488280c98f34ae12f4fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1431.265237] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1431.319928] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1431.323120] env[62816]: DEBUG nova.network.neutron [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Successfully created port: cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1431.325542] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1431.452160] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1431.786813] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.846252] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0f251e-d002-4cb1-a20b-030624a66592 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.854027] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7754d41b-412b-49b3-bab8-b31a7cc19df6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.889565] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950595ba-997a-4e16-a3bd-87f7dd439d2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.897957] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4106bfb9-f18f-41c7-8fc0-988a38d1d7f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.912007] env[62816]: DEBUG nova.compute.provider_tree [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.955228] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Releasing lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1431.955518] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1431.955757] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing instance network info cache due to event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1431.956073] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquiring lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1431.956073] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquired lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1431.956223] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1432.016296] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Successfully updated port: 54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1432.335588] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1432.363576] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1432.363695] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1432.364455] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1432.364455] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1432.364455] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1432.364455] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1432.364571] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1432.364714] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1432.364810] 
env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1432.365085] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1432.365163] env[62816]: DEBUG nova.virt.hardware [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1432.366301] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc4dbee-72a3-457a-8fd2-91d6232980cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.376981] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792fffb4-dd73-4ca7-8cce-f320bcb2ef98 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.416412] env[62816]: DEBUG nova.scheduler.client.report [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1432.817030] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updated VIF entry in instance network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1432.817332] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.861505] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.861760] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.862201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1432.862285] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.862487] env[62816]: 
DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.865080] env[62816]: INFO nova.compute.manager [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Terminating instance [ 1432.866470] env[62816]: DEBUG nova.compute.manager [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1432.866654] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1432.867493] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e21cc991-356f-48b9-aa19-f96f9b4b7067 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.875532] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1432.875760] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd66f931-f340-4f1e-b622-814852457e95 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.881484] env[62816]: DEBUG oslo_vmware.api [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1432.881484] env[62816]: value = "task-1788091" [ 1432.881484] env[62816]: _type = "Task" [ 1432.881484] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1432.890444] env[62816]: DEBUG oslo_vmware.api [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788091, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.920180] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1432.920783] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1432.927023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.500s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1432.927023] env[62816]: DEBUG nova.objects.instance [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lazy-loading 'resources' on Instance uuid 7be4c8f8-240c-4a71-93bb-aeb94243d781 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1433.165672] env[62816]: DEBUG nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Received event network-changed-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1433.165936] env[62816]: DEBUG nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Refreshing instance network info cache due to event network-changed-ffa08ef1-95a6-4f8b-b323-b76c08d6e671. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1433.166204] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Acquiring lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.166392] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Acquired lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.166593] env[62816]: DEBUG nova.network.neutron [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Refreshing network info cache for port ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1433.320370] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Releasing lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1433.320656] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Received event network-changed-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1433.320827] env[62816]: DEBUG nova.compute.manager [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Refreshing instance network info cache due to event network-changed-ffa08ef1-95a6-4f8b-b323-b76c08d6e671. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1433.321035] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquiring lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.393787] env[62816]: DEBUG oslo_vmware.api [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788091, 'name': PowerOffVM_Task, 'duration_secs': 0.274269} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.395704] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1433.396565] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1433.398754] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6e4b2af-da30-4cee-9574-f57f8e22b4dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.427794] env[62816]: DEBUG nova.compute.utils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1433.432624] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1433.432987] env[62816]: DEBUG nova.network.neutron [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1433.489055] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1433.489300] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1433.489627] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleting the datastore file [datastore1] 4a6ac464-a5e0-4ed6-909d-f1730be14380 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1433.493515] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-180e419e-2d71-491c-8925-58095121ec56 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.502148] env[62816]: DEBUG oslo_vmware.api [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1433.502148] env[62816]: value = "task-1788093" [ 1433.502148] env[62816]: _type = "Task" [ 1433.502148] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1433.513564] env[62816]: DEBUG oslo_vmware.api [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.679304] env[62816]: DEBUG nova.network.neutron [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Successfully updated port: cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1433.708899] env[62816]: DEBUG nova.policy [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3262feba6b94eddbe93814c58894995', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13061618f3be488280c98f34ae12f4fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1433.933360] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1434.001531] env[62816]: DEBUG nova.network.neutron [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updated VIF entry in instance network info cache for port ffa08ef1-95a6-4f8b-b323-b76c08d6e671. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1434.002113] env[62816]: DEBUG nova.network.neutron [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [{"id": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "address": "fa:16:3e:17:5c:19", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapffa08ef1-95", "ovs_interfaceid": "ffa08ef1-95a6-4f8b-b323-b76c08d6e671", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.021673] env[62816]: DEBUG oslo_vmware.api [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35408} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1434.022493] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1434.022933] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1434.023876] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1434.023876] env[62816]: INFO nova.compute.manager [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1434.023876] env[62816]: DEBUG oslo.service.loopingcall [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1434.024542] env[62816]: DEBUG nova.compute.manager [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1434.024692] env[62816]: DEBUG nova.network.neutron [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1434.099373] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448a8eca-1ec8-4592-b057-8b8d69257d0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.107604] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34a9700-15d5-46cb-8b36-4a9269f788cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.143995] env[62816]: DEBUG nova.network.neutron [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Successfully created port: 8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1434.146466] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e55a2de-91a3-45a5-8c70-f07d52b9c1ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.154323] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd387d1-eafb-4332-b5d4-2e80d4266fcb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.171069] env[62816]: DEBUG nova.compute.provider_tree [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1434.188250] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.188250] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.188368] env[62816]: DEBUG nova.network.neutron [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1434.505383] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Releasing lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.505672] env[62816]: DEBUG nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-vif-plugged-54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1434.505876] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Acquiring lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.506123] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.506253] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.506420] env[62816]: DEBUG nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] No waiting events found dispatching network-vif-plugged-54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1434.506587] env[62816]: WARNING nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received unexpected event network-vif-plugged-54c5cba6-8e06-4d70-bb84-fc0420096ff8 for instance with vm_state building and task_state spawning. 
[ 1434.506847] env[62816]: DEBUG nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-changed-54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1434.507038] env[62816]: DEBUG nova.compute.manager [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Refreshing instance network info cache due to event network-changed-54c5cba6-8e06-4d70-bb84-fc0420096ff8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1434.507226] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Acquiring lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1434.507365] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Acquired lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.507523] env[62816]: DEBUG nova.network.neutron [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Refreshing network info cache for port 54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1434.515455] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Acquired lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1434.515665] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Refreshing network info cache for port ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1434.591298] env[62816]: DEBUG nova.compute.manager [req-8d55b2f3-03ba-4cf9-b91f-3488df5ccf9e req-8777a22d-8564-4434-8d4b-990c4ad92f7e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-vif-plugged-27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1434.591565] env[62816]: DEBUG oslo_concurrency.lockutils [req-8d55b2f3-03ba-4cf9-b91f-3488df5ccf9e req-8777a22d-8564-4434-8d4b-990c4ad92f7e service nova] Acquiring lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.591805] env[62816]: DEBUG oslo_concurrency.lockutils [req-8d55b2f3-03ba-4cf9-b91f-3488df5ccf9e req-8777a22d-8564-4434-8d4b-990c4ad92f7e service nova] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.592074] env[62816]: DEBUG oslo_concurrency.lockutils [req-8d55b2f3-03ba-4cf9-b91f-3488df5ccf9e req-8777a22d-8564-4434-8d4b-990c4ad92f7e service nova] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.592275] env[62816]: DEBUG nova.compute.manager [req-8d55b2f3-03ba-4cf9-b91f-3488df5ccf9e req-8777a22d-8564-4434-8d4b-990c4ad92f7e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] No waiting events found dispatching network-vif-plugged-27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1434.593373] env[62816]: WARNING nova.compute.manager [req-8d55b2f3-03ba-4cf9-b91f-3488df5ccf9e req-8777a22d-8564-4434-8d4b-990c4ad92f7e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received unexpected event network-vif-plugged-27b70ae2-92a3-40bb-b6d0-5a06e860d0ad for instance with vm_state building and task_state spawning. [ 1434.613111] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Successfully updated port: 27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1434.719301] env[62816]: DEBUG nova.scheduler.client.report [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 52 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1434.719606] env[62816]: DEBUG nova.compute.provider_tree [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 52 to 53 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1434.719797] env[62816]: DEBUG nova.compute.provider_tree [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1434.729074] env[62816]: DEBUG 
nova.network.neutron [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1434.898601] env[62816]: DEBUG nova.network.neutron [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Updating instance_info_cache with network_info: [{"id": "cd98f4df-d678-4280-8111-86d76a117d36", "address": "fa:16:3e:92:19:df", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd98f4df-d6", "ovs_interfaceid": "cd98f4df-d678-4280-8111-86d76a117d36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.948147] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1434.975766] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1434.976032] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1434.976203] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1434.976383] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1434.976530] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1434.976678] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1434.976887] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1434.977460] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1434.977460] 
env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1434.977460] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1434.977647] env[62816]: DEBUG nova.virt.hardware [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1434.978404] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0a95ed-6d35-438d-a11a-1cb41e379249 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.986724] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e0c4331-cec8-4296-bb61-c2a3d5ad336c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.037129] env[62816]: DEBUG nova.network.neutron [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.044762] env[62816]: INFO nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Port ffa08ef1-95a6-4f8b-b323-b76c08d6e671 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1435.045020] env[62816]: DEBUG nova.network.neutron [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.063349] env[62816]: DEBUG nova.network.neutron [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1435.115868] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.166246] env[62816]: DEBUG nova.network.neutron [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1435.226719] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.303s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.230067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.354s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.231679] env[62816]: INFO nova.compute.claims [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1435.264305] env[62816]: INFO nova.scheduler.client.report [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleted allocations for instance 7be4c8f8-240c-4a71-93bb-aeb94243d781 [ 1435.398979] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Received event network-vif-plugged-cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1435.399173] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquiring lock "42093232-a4e5-4cc3-ab1c-a0023a91e102-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.399379] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.399546] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e 
service nova] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.399713] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] No waiting events found dispatching network-vif-plugged-cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1435.400073] env[62816]: WARNING nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Received unexpected event network-vif-plugged-cd98f4df-d678-4280-8111-86d76a117d36 for instance with vm_state building and task_state spawning. [ 1435.400073] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1435.400206] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing instance network info cache due to event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1435.400382] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquiring lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.400508] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquired lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.400667] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1435.402329] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.402439] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Instance network_info: |[{"id": "cd98f4df-d678-4280-8111-86d76a117d36", "address": "fa:16:3e:92:19:df", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", 
"bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd98f4df-d6", "ovs_interfaceid": "cd98f4df-d678-4280-8111-86d76a117d36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1435.402957] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:19:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ed91b7b-b4ec-486d-ab34-af0afb7ec691', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cd98f4df-d678-4280-8111-86d76a117d36', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1435.410613] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Creating folder: Project (13061618f3be488280c98f34ae12f4fa). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1435.411679] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cbbdfbaf-cd1b-4a18-8c82-0e6632ad3203 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.423403] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Created folder: Project (13061618f3be488280c98f34ae12f4fa) in parent group-v370905. [ 1435.423631] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Creating folder: Instances. Parent ref: group-v370973. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1435.423865] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d326376-b880-44fc-8a0b-03286e5a6e8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.432934] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Created folder: Instances in parent group-v370973. [ 1435.433163] env[62816]: DEBUG oslo.service.loopingcall [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1435.433380] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1435.433544] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2e02a77-bc73-4523-b16e-eb2cf27df9e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.453245] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1435.453245] env[62816]: value = "task-1788096" [ 1435.453245] env[62816]: _type = "Task" [ 1435.453245] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.461087] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788096, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.540616] env[62816]: INFO nova.compute.manager [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Took 1.52 seconds to deallocate network for instance. 
[ 1435.547675] env[62816]: DEBUG oslo_concurrency.lockutils [req-afd0360a-66da-4570-9108-0189cdbe2f7d req-15523cda-7c43-4fab-a7dc-1958b9b1e528 service nova] Releasing lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.669749] env[62816]: DEBUG oslo_concurrency.lockutils [req-0cee75c7-2c38-42c3-a26d-701b5761e2d1 req-4ec66ebb-846e-4f3b-9c72-9e29fd993e5a service nova] Releasing lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1435.670333] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.670513] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1435.728022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "914b187f-b05f-49d4-bf61-d536ef61934d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.728022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "914b187f-b05f-49d4-bf61-d536ef61934d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.728022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "914b187f-b05f-49d4-bf61-d536ef61934d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1435.728022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "914b187f-b05f-49d4-bf61-d536ef61934d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1435.728306] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock 
"914b187f-b05f-49d4-bf61-d536ef61934d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.729839] env[62816]: INFO nova.compute.manager [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Terminating instance [ 1435.733319] env[62816]: DEBUG nova.compute.manager [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1435.733788] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1435.734381] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be2ffdd2-c7d8-4d69-b567-aa5a580d0373 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.742973] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.742973] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70098cc3-9776-48d4-9588-73c12240872e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.751074] env[62816]: DEBUG oslo_vmware.api [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1435.751074] env[62816]: value = "task-1788097" [ 1435.751074] env[62816]: _type = "Task" [ 1435.751074] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.764704] env[62816]: DEBUG oslo_vmware.api [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788097, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.776554] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3da446d5-d994-45a1-8387-87e6e7f2efcc tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "7be4c8f8-240c-4a71-93bb-aeb94243d781" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.243s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.941303] env[62816]: DEBUG nova.network.neutron [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Successfully updated port: 8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1435.965814] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788096, 'name': CreateVM_Task, 'duration_secs': 0.33258} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.965814] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1435.965814] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.965814] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.965814] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1435.966287] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1a64fcc-4e10-4560-8232-d1a051cc18d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.970750] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1435.970750] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f2abae-1060-d5f1-5f12-2de7290fe874" [ 1435.970750] env[62816]: _type = "Task" [ 1435.970750] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.979030] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f2abae-1060-d5f1-5f12-2de7290fe874, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.048295] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.162879] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updated VIF entry in instance network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1436.163834] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.184269] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.209079] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] 
[instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1436.267290] env[62816]: DEBUG oslo_vmware.api [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788097, 'name': PowerOffVM_Task, 'duration_secs': 0.17275} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.267290] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1436.267290] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1436.267290] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6a9adf7-3a05-4bd4-b0c0-96d5e29a9907 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.338113] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1436.338343] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1436.338530] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Deleting the datastore file [datastore1] 914b187f-b05f-49d4-bf61-d536ef61934d {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1436.338777] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3355439d-e38f-4fe5-8f42-43cba3b08987 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.344894] env[62816]: DEBUG oslo_vmware.api [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for the task: (returnval){ [ 1436.344894] env[62816]: value = "task-1788099" [ 1436.344894] env[62816]: _type = "Task" [ 1436.344894] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.352158] env[62816]: DEBUG oslo_vmware.api [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788099, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.446599] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "refresh_cache-cf6ff174-1324-42bd-a77a-905b9a333c27" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.447718] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "refresh_cache-cf6ff174-1324-42bd-a77a-905b9a333c27" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.447718] env[62816]: DEBUG nova.network.neutron [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.483201] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f2abae-1060-d5f1-5f12-2de7290fe874, 'name': SearchDatastore_Task, 'duration_secs': 0.010759} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.483824] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.483971] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1436.484218] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.484394] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.484581] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1436.484847] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4b22477-5b5c-4dd7-8855-a025dc7fd751 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.493195] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1436.493380] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1436.494106] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2344622c-f41e-4f5b-b005-4b3e5bed2e43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.500283] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1436.500283] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e23996-6f41-38a4-009a-396e264f2190" [ 1436.500283] env[62816]: _type = "Task" [ 1436.500283] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.509321] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e23996-6f41-38a4-009a-396e264f2190, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.661186] env[62816]: DEBUG nova.compute.manager [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Received event network-vif-deleted-ffa08ef1-95a6-4f8b-b323-b76c08d6e671 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1436.661393] env[62816]: DEBUG nova.compute.manager [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-changed-27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1436.661552] env[62816]: DEBUG nova.compute.manager [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Refreshing instance network info cache due to event network-changed-27b70ae2-92a3-40bb-b6d0-5a06e860d0ad. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1436.662227] env[62816]: DEBUG oslo_concurrency.lockutils [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] Acquiring lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.667701] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Releasing lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1436.667925] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Received event network-changed-cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1436.668106] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Refreshing instance network info cache due to event network-changed-cd98f4df-d678-4280-8111-86d76a117d36. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1436.668299] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquiring lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.668439] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquired lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.668600] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Refreshing network info cache for port cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1436.736049] env[62816]: DEBUG nova.network.neutron [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updating instance_info_cache with network_info: [{"id": "2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce", "address": "fa:16:3e:8c:2d:df", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", "segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff6f344-ea", "ovs_interfaceid": "2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "address": "fa:16:3e:8f:c4:4d", "network": {"id": "e7634fcd-3acc-4f56-8b7d-685211ca51cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717224539", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c5cba6-8e", "ovs_interfaceid": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "address": "fa:16:3e:d9:67:7b", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", "segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27b70ae2-92", "ovs_interfaceid": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1436.756029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.756210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.756416] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1436.756599] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1436.756766] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1436.758755] env[62816]: INFO nova.compute.manager [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Terminating instance [ 1436.760351] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "refresh_cache-f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1436.760508] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquired lock "refresh_cache-f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1436.760673] env[62816]: DEBUG nova.network.neutron [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1436.779640] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee4b761-a1e2-4074-8148-a2a84d5e16e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.787409] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ca7040-fc17-4d77-a592-dd0858f9c9e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.820024] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f954e62-54b0-45bf-a495-8ae2cbf6c4f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.827106] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d85540f5-c601-4667-940d-463d6ce10875 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.841239] env[62816]: DEBUG nova.compute.provider_tree [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.854522] env[62816]: DEBUG oslo_vmware.api [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Task: {'id': task-1788099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142117} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.854770] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1436.854962] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1436.855144] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1436.855315] env[62816]: INFO nova.compute.manager [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1436.855544] env[62816]: DEBUG oslo.service.loopingcall [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1436.855723] env[62816]: DEBUG nova.compute.manager [-] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1436.855817] env[62816]: DEBUG nova.network.neutron [-] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1436.985487] env[62816]: DEBUG nova.network.neutron [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1437.014715] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e23996-6f41-38a4-009a-396e264f2190, 'name': SearchDatastore_Task, 'duration_secs': 0.009339} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.015828] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1a66bf4-7ad6-49cc-a6e6-54a8c3a5d755 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.022879] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1437.022879] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5270cfad-12b7-c807-ae9f-9327e27c11ec" [ 1437.022879] env[62816]: _type = "Task" [ 1437.022879] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.033612] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5270cfad-12b7-c807-ae9f-9327e27c11ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.175145] env[62816]: DEBUG nova.network.neutron [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Updating instance_info_cache with network_info: [{"id": "8cb4152c-bb70-4c55-b65d-1d1990432a62", "address": "fa:16:3e:39:15:35", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cb4152c-bb", "ovs_interfaceid": "8cb4152c-bb70-4c55-b65d-1d1990432a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.238797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Releasing lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.239251] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance network_info: |[{"id": "2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce", "address": "fa:16:3e:8c:2d:df", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", "segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff6f344-ea", "ovs_interfaceid": "2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "address": 
"fa:16:3e:8f:c4:4d", "network": {"id": "e7634fcd-3acc-4f56-8b7d-685211ca51cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717224539", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c5cba6-8e", "ovs_interfaceid": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "address": "fa:16:3e:d9:67:7b", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", "segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27b70ae2-92", "ovs_interfaceid": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1437.239562] env[62816]: DEBUG oslo_concurrency.lockutils [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] Acquired lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.239743] env[62816]: DEBUG nova.network.neutron [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Refreshing network info cache for port 27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1437.241022] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:2d:df', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dabbac20-1723-40ad-9da0-e53b28073651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': '2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:c4:4d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1880df72-582c-44cb-992d-88dc6a514914', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54c5cba6-8e06-4d70-bb84-fc0420096ff8', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:67:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dabbac20-1723-40ad-9da0-e53b28073651', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27b70ae2-92a3-40bb-b6d0-5a06e860d0ad', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.251824] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Creating folder: Project (8e20c8f5bdd64f1d89157aa0b947431e). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.257397] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ffc2ce35-3a51-4394-9885-301271b3f974 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.268848] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Created folder: Project (8e20c8f5bdd64f1d89157aa0b947431e) in parent group-v370905. [ 1437.269052] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Creating folder: Instances. Parent ref: group-v370976. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1437.269280] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61cd30ca-0bd7-412e-9e62-9750219e76dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.278171] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Created folder: Instances in parent group-v370976. [ 1437.278494] env[62816]: DEBUG oslo.service.loopingcall [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.280383] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1437.280597] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d0ba935d-f416-403a-bba2-8a3bad0ba3b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.299779] env[62816]: DEBUG nova.network.neutron [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1437.308985] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.308985] env[62816]: value = "task-1788102" [ 1437.308985] env[62816]: _type = "Task" [ 1437.308985] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.316383] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788102, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.344454] env[62816]: DEBUG nova.scheduler.client.report [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1437.397733] env[62816]: DEBUG nova.network.neutron [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.429156] env[62816]: DEBUG nova.compute.manager [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Received event network-vif-plugged-8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.429371] env[62816]: DEBUG oslo_concurrency.lockutils [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] Acquiring lock "cf6ff174-1324-42bd-a77a-905b9a333c27-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.429584] env[62816]: DEBUG oslo_concurrency.lockutils [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] Lock 
"cf6ff174-1324-42bd-a77a-905b9a333c27-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.429754] env[62816]: DEBUG oslo_concurrency.lockutils [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.429940] env[62816]: DEBUG nova.compute.manager [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] No waiting events found dispatching network-vif-plugged-8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1437.430128] env[62816]: WARNING nova.compute.manager [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Received unexpected event network-vif-plugged-8cb4152c-bb70-4c55-b65d-1d1990432a62 for instance with vm_state building and task_state spawning. [ 1437.430288] env[62816]: DEBUG nova.compute.manager [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Received event network-changed-8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.430439] env[62816]: DEBUG nova.compute.manager [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Refreshing instance network info cache due to event network-changed-8cb4152c-bb70-4c55-b65d-1d1990432a62. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1437.430600] env[62816]: DEBUG oslo_concurrency.lockutils [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] Acquiring lock "refresh_cache-cf6ff174-1324-42bd-a77a-905b9a333c27" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.458906] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Updated VIF entry in instance network info cache for port cd98f4df-d678-4280-8111-86d76a117d36. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1437.459258] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Updating instance_info_cache with network_info: [{"id": "cd98f4df-d678-4280-8111-86d76a117d36", "address": "fa:16:3e:92:19:df", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd98f4df-d6", "ovs_interfaceid": "cd98f4df-d678-4280-8111-86d76a117d36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.490224] env[62816]: DEBUG nova.network.neutron [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updated VIF entry in instance network info cache for port 27b70ae2-92a3-40bb-b6d0-5a06e860d0ad. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1437.490712] env[62816]: DEBUG nova.network.neutron [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updating instance_info_cache with network_info: [{"id": "2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce", "address": "fa:16:3e:8c:2d:df", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.101", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", "segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ff6f344-ea", "ovs_interfaceid": "2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "address": "fa:16:3e:8f:c4:4d", "network": {"id": "e7634fcd-3acc-4f56-8b7d-685211ca51cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717224539", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c5cba6-8e", "ovs_interfaceid": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "address": "fa:16:3e:d9:67:7b", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", 
"segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27b70ae2-92", "ovs_interfaceid": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.533652] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5270cfad-12b7-c807-ae9f-9327e27c11ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010423} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.533917] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.534187] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 42093232-a4e5-4cc3-ab1c-a0023a91e102/42093232-a4e5-4cc3-ab1c-a0023a91e102.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1437.534514] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0dbb9684-d186-411b-9175-bf75eefdc547 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.541448] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1437.541448] env[62816]: value = "task-1788103" [ 1437.541448] env[62816]: _type = "Task" [ 1437.541448] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.548983] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788103, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.651663] env[62816]: DEBUG nova.network.neutron [-] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.678385] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "refresh_cache-cf6ff174-1324-42bd-a77a-905b9a333c27" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.678723] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Instance network_info: |[{"id": "8cb4152c-bb70-4c55-b65d-1d1990432a62", "address": "fa:16:3e:39:15:35", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cb4152c-bb", "ovs_interfaceid": "8cb4152c-bb70-4c55-b65d-1d1990432a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1437.679030] env[62816]: DEBUG oslo_concurrency.lockutils [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] Acquired lock "refresh_cache-cf6ff174-1324-42bd-a77a-905b9a333c27" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.679220] env[62816]: DEBUG nova.network.neutron [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Refreshing network info cache for port 8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1437.680722] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:15:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ed91b7b-b4ec-486d-ab34-af0afb7ec691', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8cb4152c-bb70-4c55-b65d-1d1990432a62', 'vif_model': 'vmxnet3'}] 
{{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1437.688302] env[62816]: DEBUG oslo.service.loopingcall [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1437.691505] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1437.692223] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3419e17e-ab06-4be5-8a25-dc55fcb7de35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.713849] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1437.713849] env[62816]: value = "task-1788104" [ 1437.713849] env[62816]: _type = "Task" [ 1437.713849] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.722835] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788104, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.818812] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788102, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.849205] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.850112] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1437.853017] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.324s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.854501] env[62816]: INFO nova.compute.claims [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1437.901219] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Releasing lock "refresh_cache-f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.901372] env[62816]: DEBUG nova.compute.manager [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1437.901562] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1437.902723] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cdec0e-4937-4b29-9186-06a7a9879ba8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.911367] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1437.911648] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0cd89c75-62c5-4605-80dd-fe5b4587f99d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.919455] env[62816]: DEBUG oslo_vmware.api [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1437.919455] env[62816]: value = "task-1788105" [ 1437.919455] env[62816]: _type = "Task" [ 1437.919455] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.927941] env[62816]: DEBUG oslo_vmware.api [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1788105, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.962513] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Releasing lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.962903] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.963325] env[62816]: DEBUG nova.compute.manager [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing instance network info cache due to event network-changed-1443be92-279c-4376-8c5d-2dff1bb3f82f. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1437.963798] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquiring lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.964148] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Acquired lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.964534] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Refreshing network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1437.984263] env[62816]: DEBUG nova.network.neutron [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Updated VIF entry in instance network info cache for port 8cb4152c-bb70-4c55-b65d-1d1990432a62. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1437.984705] env[62816]: DEBUG nova.network.neutron [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Updating instance_info_cache with network_info: [{"id": "8cb4152c-bb70-4c55-b65d-1d1990432a62", "address": "fa:16:3e:39:15:35", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8cb4152c-bb", "ovs_interfaceid": "8cb4152c-bb70-4c55-b65d-1d1990432a62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1437.993564] env[62816]: DEBUG oslo_concurrency.lockutils [req-38f5dca8-c169-4ee1-9f7a-626baa4432be req-104e7279-e5e3-44f0-8e79-82ba346be884 service nova] Releasing lock "refresh_cache-0c5c5c06-0b5e-4e11-84b5-ca76828a0565" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.052149] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788103, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.154062] env[62816]: INFO nova.compute.manager [-] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Took 1.30 seconds to deallocate network for instance. [ 1438.225581] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788104, 'name': CreateVM_Task, 'duration_secs': 0.37735} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.226627] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1438.226627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.226627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.226895] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1438.227183] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bfd3a48-eb8e-4dbb-b161-68615b7f999a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.232555] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1438.232555] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a5a012-b9a5-ffd3-a547-1098b57e29cc" [ 1438.232555] env[62816]: _type = "Task" [ 1438.232555] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.240710] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a5a012-b9a5-ffd3-a547-1098b57e29cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.320828] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788102, 'name': CreateVM_Task, 'duration_secs': 0.938699} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.321017] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1438.321816] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.358587] env[62816]: DEBUG nova.compute.utils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1438.359909] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1438.360063] env[62816]: DEBUG nova.network.neutron [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1438.403024] env[62816]: DEBUG nova.policy [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1438.427971] env[62816]: DEBUG oslo_vmware.api [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1788105, 'name': PowerOffVM_Task, 'duration_secs': 0.23145} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.428288] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1438.428467] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1438.428703] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21f5d7b8-fad0-4273-91ab-8f6884a0c929 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.454393] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1438.454648] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1438.454832] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleting the datastore file [datastore1] f06102d6-be5c-40d1-ae1d-8ae8190fd0d7 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1438.455104] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a6d3dfa5-ff2f-40b2-a79d-1c2eafc28f6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.461790] env[62816]: DEBUG oslo_vmware.api [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for the task: (returnval){ [ 1438.461790] env[62816]: value = "task-1788107" [ 1438.461790] env[62816]: _type = "Task" [ 1438.461790] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.471606] env[62816]: DEBUG oslo_vmware.api [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1788107, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.487620] env[62816]: DEBUG oslo_concurrency.lockutils [req-e21e72cf-3924-4436-8aa8-298074c06572 req-45a696da-d3b8-40b5-855a-2e23a68923fa service nova] Releasing lock "refresh_cache-cf6ff174-1324-42bd-a77a-905b9a333c27" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.552795] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788103, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536433} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.553078] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 42093232-a4e5-4cc3-ab1c-a0023a91e102/42093232-a4e5-4cc3-ab1c-a0023a91e102.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1438.553294] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1438.553540] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1242dd74-c675-4b0a-85dd-9b35ef5d3487 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.559364] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1438.559364] env[62816]: value = "task-1788108" [ 1438.559364] env[62816]: _type = "Task" [ 1438.559364] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.567233] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788108, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.662135] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.690608] env[62816]: DEBUG nova.network.neutron [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Successfully created port: 51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.694864] env[62816]: DEBUG nova.compute.manager [req-f21bd059-b984-4585-b48c-e1dca864e93d req-6d83967c-eea1-40f0-bff6-558420a6a6cc service nova] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Received event network-vif-deleted-0122f844-9db0-479d-adad-20dd495d1aa0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1438.707284] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updated VIF entry in instance network info cache for port 1443be92-279c-4376-8c5d-2dff1bb3f82f. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1438.707624] env[62816]: DEBUG nova.network.neutron [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [{"id": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "address": "fa:16:3e:ea:79:c1", "network": {"id": "b4fbaa11-d9aa-4537-9526-0e49ba02abf0", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-528261659-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2f09a23020874a6798ef4d132f6ec845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4fb94adb-cc41-4c16-9830-a3205dbd2bf5", "external-id": "nsx-vlan-transportzone-100", "segmentation_id": 100, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1443be92-27", "ovs_interfaceid": "1443be92-279c-4376-8c5d-2dff1bb3f82f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.742951] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a5a012-b9a5-ffd3-a547-1098b57e29cc, 'name': SearchDatastore_Task, 'duration_secs': 0.008428} 
completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.743374] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.743604] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.743834] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.743992] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.744183] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1438.744463] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1438.744759] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1438.744977] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b36a74e-5734-46f6-af0d-53672d0b756f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.746818] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9485e9aa-f589-4b16-a8f9-a02910e5020e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.752122] 
env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1438.752122] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b6faa0-10d6-e87a-4f66-c5f7e78b9bda" [ 1438.752122] env[62816]: _type = "Task" [ 1438.752122] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.756737] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1438.756917] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1438.758057] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79e104db-f9a2-4c9b-923d-996fa147ae31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.763765] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b6faa0-10d6-e87a-4f66-c5f7e78b9bda, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.764333] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1438.764556] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1438.764754] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1438.766673] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1438.766673] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52af29e0-50a9-9d14-b034-5d6aefd5e482" [ 1438.766673] env[62816]: _type = "Task" [ 1438.766673] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.773837] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52af29e0-50a9-9d14-b034-5d6aefd5e482, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.866691] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1438.972413] env[62816]: DEBUG oslo_vmware.api [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Task: {'id': task-1788107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086058} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.972666] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1438.972843] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1438.973074] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1438.973225] env[62816]: INFO nova.compute.manager [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1438.973473] env[62816]: DEBUG oslo.service.loopingcall [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1438.975854] env[62816]: DEBUG nova.compute.manager [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1438.975950] env[62816]: DEBUG nova.network.neutron [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1439.005440] env[62816]: DEBUG nova.network.neutron [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1439.072358] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788108, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.115568} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.072628] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1439.073419] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e0b291-56ce-4f6f-aca1-14bd42226a82 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.097965] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 42093232-a4e5-4cc3-ab1c-a0023a91e102/42093232-a4e5-4cc3-ab1c-a0023a91e102.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1439.100323] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23f52c4f-3047-47dc-97b4-b1575f929e72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.119448] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1439.119448] env[62816]: value = "task-1788109" [ 1439.119448] env[62816]: _type = "Task" [ 1439.119448] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.129695] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788109, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.209949] env[62816]: DEBUG oslo_concurrency.lockutils [req-66b8cc36-9398-486e-87be-e5b9e9ca0a1a req-ef4ce24a-7710-4528-896d-179ec6d6a96e service nova] Releasing lock "refresh_cache-99bd7579-7097-41df-a8c0-e12a3863a3dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.277063] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52af29e0-50a9-9d14-b034-5d6aefd5e482, 'name': SearchDatastore_Task, 'duration_secs': 0.007445} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.280735] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-740dd059-9a40-496e-8c21-07c2b7d3cf4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.286684] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1439.286684] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526a9d55-ba4d-2600-510e-a44dd541debb" [ 1439.286684] env[62816]: _type = "Task" [ 1439.286684] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.297745] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526a9d55-ba4d-2600-510e-a44dd541debb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.418978] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a2bf7b-802d-4483-b7d7-5bcd5ec291b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.426954] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170a62f9-19dc-40bd-af88-cf3fc5a585ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.456066] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65be5894-de42-4672-a328-3e017606a985 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.462870] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914e6d15-1158-474f-8773-d190b5f0e47a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.477165] env[62816]: DEBUG nova.compute.provider_tree [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.508966] env[62816]: DEBUG nova.network.neutron [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1439.628756] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788109, 'name': ReconfigVM_Task, 'duration_secs': 0.262353} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.629027] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 42093232-a4e5-4cc3-ab1c-a0023a91e102/42093232-a4e5-4cc3-ab1c-a0023a91e102.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1439.629638] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6ac49137-25af-4579-a3fe-1ba431970032 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.635423] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1439.635423] env[62816]: value = "task-1788110" [ 1439.635423] env[62816]: _type = "Task" [ 1439.635423] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.642729] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788110, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.797101] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526a9d55-ba4d-2600-510e-a44dd541debb, 'name': SearchDatastore_Task, 'duration_secs': 0.009159} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.797218] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.797467] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] cf6ff174-1324-42bd-a77a-905b9a333c27/cf6ff174-1324-42bd-a77a-905b9a333c27.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1439.797739] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.797931] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1439.798153] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f844da3b-ec50-465b-911f-6a6655c756ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.799955] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cb8d48b-4c31-445c-b5bc-8c340319c2cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.805837] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1439.805837] env[62816]: value = "task-1788111" [ 1439.805837] env[62816]: _type = "Task" [ 1439.805837] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.809313] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1439.809489] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1439.810445] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66ffb4e7-51b7-4bc0-aae1-235c48fc1a2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.815153] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788111, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.817997] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1439.817997] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529ce001-72f6-32ae-7545-533f6b6b2c6d" [ 1439.817997] env[62816]: _type = "Task" [ 1439.817997] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.824847] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ce001-72f6-32ae-7545-533f6b6b2c6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.884366] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1439.908970] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1439.909280] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1439.909500] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.909745] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1439.909911] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.910197] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1439.910466] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1439.911065] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1439.911065] env[62816]: DEBUG 
nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1439.911182] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1439.911325] env[62816]: DEBUG nova.virt.hardware [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1439.912435] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa8b89c-f46f-46dc-8964-6b84cfb6fd4b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.921452] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1be2a9-a7aa-419f-a252-644a92ada86b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.980614] env[62816]: DEBUG nova.scheduler.client.report [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1440.010467] env[62816]: INFO nova.compute.manager [-] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Took 1.03 seconds to deallocate network for instance. [ 1440.149785] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788110, 'name': Rename_Task, 'duration_secs': 0.135483} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.150193] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1440.150502] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45b7ea56-442f-4d66-8ea8-2617b14f029d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.157756] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1440.157756] env[62816]: value = "task-1788112" [ 1440.157756] env[62816]: _type = "Task" [ 1440.157756] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.166605] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788112, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.296154] env[62816]: DEBUG nova.compute.manager [req-44fef5a5-424b-4488-a0bd-49311f3c5307 req-c7dd5d7b-a1a6-4180-ace6-2f883e3da25e service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-vif-plugged-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1440.296154] env[62816]: DEBUG oslo_concurrency.lockutils [req-44fef5a5-424b-4488-a0bd-49311f3c5307 req-c7dd5d7b-a1a6-4180-ace6-2f883e3da25e service nova] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.296154] env[62816]: DEBUG oslo_concurrency.lockutils [req-44fef5a5-424b-4488-a0bd-49311f3c5307 req-c7dd5d7b-a1a6-4180-ace6-2f883e3da25e service nova] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.296154] env[62816]: DEBUG oslo_concurrency.lockutils [req-44fef5a5-424b-4488-a0bd-49311f3c5307 req-c7dd5d7b-a1a6-4180-ace6-2f883e3da25e service nova] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.296154] env[62816]: DEBUG nova.compute.manager [req-44fef5a5-424b-4488-a0bd-49311f3c5307 req-c7dd5d7b-a1a6-4180-ace6-2f883e3da25e service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] No waiting events found dispatching network-vif-plugged-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1440.296154] env[62816]: WARNING 
nova.compute.manager [req-44fef5a5-424b-4488-a0bd-49311f3c5307 req-c7dd5d7b-a1a6-4180-ace6-2f883e3da25e service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received unexpected event network-vif-plugged-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a for instance with vm_state building and task_state spawning. [ 1440.318051] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788111, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494197} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.318570] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] cf6ff174-1324-42bd-a77a-905b9a333c27/cf6ff174-1324-42bd-a77a-905b9a333c27.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1440.320022] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1440.322222] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20e4dbfc-d867-45a8-bdbc-82f166660a14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.334111] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ce001-72f6-32ae-7545-533f6b6b2c6d, 'name': SearchDatastore_Task, 'duration_secs': 0.00821} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.334111] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1440.334111] env[62816]: value = "task-1788113" [ 1440.334111] env[62816]: _type = "Task" [ 1440.334111] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.334111] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6aeae35f-a622-45a9-9faa-670aad7df689 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.343329] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1440.343329] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526287b9-4aa2-728f-83b1-017b1eecacbc" [ 1440.343329] env[62816]: _type = "Task" [ 1440.343329] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.347629] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788113, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.356287] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526287b9-4aa2-728f-83b1-017b1eecacbc, 'name': SearchDatastore_Task, 'duration_secs': 0.007943} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.356736] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.357056] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0c5c5c06-0b5e-4e11-84b5-ca76828a0565/0c5c5c06-0b5e-4e11-84b5-ca76828a0565.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.357348] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db83c0e9-f0c8-4c96-8195-2de8f75e9349 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.364465] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1440.364465] env[62816]: value = "task-1788114" [ 1440.364465] env[62816]: _type = "Task" [ 1440.364465] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.372618] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788114, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.423379] env[62816]: DEBUG nova.network.neutron [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Successfully updated port: 51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1440.486647] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.634s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.487203] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1440.489712] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.524s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.489986] env[62816]: DEBUG nova.objects.instance [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lazy-loading 'resources' on Instance uuid de33d02f-7e34-4619-a2ed-cda6c54aa030 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.517640] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.670269] env[62816]: DEBUG oslo_vmware.api [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788112, 'name': PowerOnVM_Task, 'duration_secs': 0.498563} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.670640] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.670897] env[62816]: INFO nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Took 8.34 seconds to spawn the instance on the hypervisor. 
[ 1440.671128] env[62816]: DEBUG nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.671989] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a222b533-c7ea-42c3-8e57-88577c4c087d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.845289] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788113, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058608} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.845575] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1440.846653] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe973f13-ca12-44fc-9677-1f6a622d4538 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.869160] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] cf6ff174-1324-42bd-a77a-905b9a333c27/cf6ff174-1324-42bd-a77a-905b9a333c27.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1440.869549] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38edd337-de55-4c58-91dc-a13664b7c560 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.891020] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788114, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493926} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.892229] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0c5c5c06-0b5e-4e11-84b5-ca76828a0565/0c5c5c06-0b5e-4e11-84b5-ca76828a0565.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1440.892452] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1440.892747] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1440.892747] env[62816]: value = "task-1788115" [ 1440.892747] env[62816]: _type = "Task" [ 1440.892747] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.892928] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b01931a9-ad5c-4fe8-9a26-dcab5ce269d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.903026] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788115, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.903606] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1440.903606] env[62816]: value = "task-1788116" [ 1440.903606] env[62816]: _type = "Task" [ 1440.903606] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.915194] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788116, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.928117] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.929703] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.929703] env[62816]: DEBUG nova.network.neutron [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.993576] env[62816]: DEBUG nova.compute.utils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1440.995932] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1440.996240] env[62816]: DEBUG nova.network.neutron [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1441.045958] env[62816]: DEBUG nova.policy [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21ed3abad90741799db9f998a15c7787', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f016ab6a03848ba8014647f483f0b92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1441.193548] env[62816]: INFO nova.compute.manager [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Took 39.67 seconds to build instance. 
[ 1441.353259] env[62816]: DEBUG nova.network.neutron [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Successfully created port: b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1441.405810] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788115, 'name': ReconfigVM_Task, 'duration_secs': 0.26465} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.408717] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Reconfigured VM instance instance-00000017 to attach disk [datastore1] cf6ff174-1324-42bd-a77a-905b9a333c27/cf6ff174-1324-42bd-a77a-905b9a333c27.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1441.409342] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6fc6d9f-7b2c-4522-834f-dea475addafd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.418143] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788116, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077755} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.420112] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1441.420485] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1441.420485] env[62816]: value = "task-1788117" [ 1441.420485] env[62816]: _type = "Task" [ 1441.420485] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.421169] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff2d7d1-c7c6-4f96-91b1-f2d5dcc3d80a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.456859] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 0c5c5c06-0b5e-4e11-84b5-ca76828a0565/0c5c5c06-0b5e-4e11-84b5-ca76828a0565.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1441.460347] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d5a326f-2091-4c1f-92db-bc399b09b7ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.475528] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788117, 'name': Rename_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.483632] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1441.483632] env[62816]: value = "task-1788118" [ 1441.483632] env[62816]: _type = "Task" [ 1441.483632] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.493504] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788118, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.496099] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1441.529591] env[62816]: DEBUG nova.network.neutron [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1441.581807] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fadc4b-8326-4f33-b64a-3c4631475b94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.592321] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20d1f92-a5e5-4a41-8b9d-ee1d7318d88f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.630528] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9949bad0-a1df-4059-95e7-2ee4415a3594 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.638168] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024cf065-7857-4768-9ece-dc24a3e1435e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.652606] env[62816]: DEBUG nova.compute.provider_tree [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.695639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bed82b31-ea7d-45a1-8533-0362615efc4d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.965s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.730142] env[62816]: DEBUG nova.network.neutron [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.933595] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788117, 'name': Rename_Task, 'duration_secs': 0.141366} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.933892] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1441.934165] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb39feda-0047-4912-a696-58fadba8f483 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.940785] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1441.940785] env[62816]: value = "task-1788119" [ 1441.940785] env[62816]: _type = "Task" [ 1441.940785] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.948918] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.993266] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788118, 'name': ReconfigVM_Task, 'duration_secs': 0.29948} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.993555] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 0c5c5c06-0b5e-4e11-84b5-ca76828a0565/0c5c5c06-0b5e-4e11-84b5-ca76828a0565.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1441.994484] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a598be9a-634d-4196-96dd-e7562093511a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.004114] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1442.004114] env[62816]: value = "task-1788120" [ 1442.004114] env[62816]: _type = "Task" [ 1442.004114] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.015279] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788120, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.157482] env[62816]: DEBUG nova.scheduler.client.report [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.198435] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1442.232826] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.232954] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Instance network_info: |[{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1442.233658] env[62816]: DEBUG 
nova.virt.vmwareapi.vmops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:b1:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '51d24096-dc5e-4a89-a26a-e0cf4eb85e6a', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1442.241617] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Creating folder: Project (3d3fae79b00d494daaadfee718781379). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.242638] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6462dba-e9f8-4dde-9005-7b8f0a28f6a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.253699] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Created folder: Project (3d3fae79b00d494daaadfee718781379) in parent group-v370905. [ 1442.253912] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Creating folder: Instances. Parent ref: group-v370980. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.254165] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d3651eb-1a44-4fda-82a7-474ccdaf5138 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.263170] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Created folder: Instances in parent group-v370980. [ 1442.263422] env[62816]: DEBUG oslo.service.loopingcall [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1442.263609] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1442.263800] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09c07cef-8b07-4dbb-ac34-1bf8adc5fea3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.285124] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1442.285124] env[62816]: value = "task-1788123" [ 1442.285124] env[62816]: _type = "Task" [ 1442.285124] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.293079] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788123, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.358277] env[62816]: DEBUG nova.compute.manager [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-changed-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1442.358277] env[62816]: DEBUG nova.compute.manager [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Refreshing instance network info cache due to event network-changed-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1442.358277] env[62816]: DEBUG oslo_concurrency.lockutils [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] Acquiring lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.358277] env[62816]: DEBUG oslo_concurrency.lockutils [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] Acquired lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.358419] env[62816]: DEBUG nova.network.neutron [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Refreshing network info cache for port 51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.452962] env[62816]: DEBUG oslo_vmware.api [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788119, 'name': PowerOnVM_Task, 'duration_secs': 0.441619} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.453266] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1442.453469] env[62816]: INFO nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Took 7.51 seconds to spawn the instance on the hypervisor. 
[ 1442.453679] env[62816]: DEBUG nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1442.454439] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e5645e-cfdc-493d-bd85-befab821048c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.506596] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1442.518280] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788120, 'name': Rename_Task, 'duration_secs': 0.145547} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.518596] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1442.518847] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-40ab47f1-2176-4348-953c-e34c60442574 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.525390] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1442.525390] env[62816]: value = "task-1788124" [ 1442.525390] env[62816]: _type = "Task" [ 1442.525390] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.533390] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1442.533646] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1442.533805] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.534052] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1442.534147] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.534304] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1442.534534] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1442.534700] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1442.535303] env[62816]: DEBUG nova.virt.hardware [None 
req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1442.535303] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1442.535303] env[62816]: DEBUG nova.virt.hardware [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1442.535969] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b1eaf09-41f1-4af1-a465-8c9993692810 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.542015] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788124, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.547169] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab7c3e0-f4c6-4be3-bb4a-d26171b26abe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.663364] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.665848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.571s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.666196] env[62816]: DEBUG nova.objects.instance [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1442.688475] env[62816]: INFO nova.scheduler.client.report [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Deleted allocations for instance de33d02f-7e34-4619-a2ed-cda6c54aa030 [ 1442.727032] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 
tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1442.794770] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788123, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.971523] env[62816]: INFO nova.compute.manager [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Took 39.85 seconds to build instance. [ 1443.003987] env[62816]: DEBUG nova.network.neutron [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Successfully updated port: b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1443.040246] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788124, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.156636] env[62816]: DEBUG nova.network.neutron [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updated VIF entry in instance network info cache for port 51d24096-dc5e-4a89-a26a-e0cf4eb85e6a. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1443.157013] env[62816]: DEBUG nova.network.neutron [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.201107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a362cb69-5681-4c45-a2e8-37c3aee4abe2 tempest-TenantUsagesTestJSON-697338855 tempest-TenantUsagesTestJSON-697338855-project-member] Lock "de33d02f-7e34-4619-a2ed-cda6c54aa030" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.612s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.296251] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788123, 'name': CreateVM_Task, 'duration_secs': 0.531205} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.296516] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1443.297189] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.297463] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.297723] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1443.298186] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-425d5592-a100-4e18-9273-17e4c64479bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.303171] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1443.303171] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52894fcb-3262-dcac-148f-260974504edc" [ 1443.303171] env[62816]: _type = "Task" [ 1443.303171] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.311994] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52894fcb-3262-dcac-148f-260974504edc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.473910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6af5bbd2-f6a7-4f86-b8d0-67bbcc9ce15d tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.108s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.507435] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "refresh_cache-0e0261fe-4376-487c-9d54-c4f37577409c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.507604] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "refresh_cache-0e0261fe-4376-487c-9d54-c4f37577409c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.507722] env[62816]: DEBUG nova.network.neutron [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1443.538669] env[62816]: DEBUG oslo_vmware.api [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788124, 'name': PowerOnVM_Task, 'duration_secs': 0.58248} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.539120] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1443.539449] env[62816]: INFO nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Took 16.54 seconds to spawn the instance on the hypervisor. 
[ 1443.539882] env[62816]: DEBUG nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1443.540941] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a16772-8a5f-4dda-a5ad-2681248560b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.660747] env[62816]: DEBUG oslo_concurrency.lockutils [req-590e492f-f558-44d9-9e93-99e83d22b642 req-d756a39e-aa7f-4d39-8049-574aca3a9837 service nova] Releasing lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.676185] env[62816]: DEBUG oslo_concurrency.lockutils [None req-94b8f9ca-5a6d-4f1e-bb49-e599ce61296c tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.676575] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.775s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.679260] env[62816]: INFO nova.compute.claims [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1443.817165] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52894fcb-3262-dcac-148f-260974504edc, 'name': SearchDatastore_Task, 'duration_secs': 0.011263} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.817165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.817165] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1443.817581] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.817918] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.818231] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1443.818656] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d0fff92-e606-4988-958a-fab2b7fb6c2e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.833837] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1443.837205] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1443.837205] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0d3103d-e159-40b9-b216-d48acaea09ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.842653] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1443.842653] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5296a1fb-c0c7-33e0-12d0-78399521bd45" [ 1443.842653] env[62816]: _type = "Task" [ 1443.842653] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.851718] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5296a1fb-c0c7-33e0-12d0-78399521bd45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.977858] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1444.045871] env[62816]: DEBUG nova.network.neutron [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1444.064288] env[62816]: INFO nova.compute.manager [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Took 42.53 seconds to build instance. [ 1444.355024] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5296a1fb-c0c7-33e0-12d0-78399521bd45, 'name': SearchDatastore_Task, 'duration_secs': 0.037981} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.356069] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-451bb7d7-084f-4bb4-82ff-2666c0fa1508 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.362493] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1444.362493] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ae09bd-7e51-c9fe-0132-327a31acb9e3" [ 1444.362493] env[62816]: _type = "Task" [ 1444.362493] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.370739] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ae09bd-7e51-c9fe-0132-327a31acb9e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.447867] env[62816]: DEBUG nova.network.neutron [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Updating instance_info_cache with network_info: [{"id": "b2b6a990-a634-4e68-ba4c-886b856209a5", "address": "fa:16:3e:df:cb:24", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b6a990-a6", "ovs_interfaceid": "b2b6a990-a634-4e68-ba4c-886b856209a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1444.506976] env[62816]: DEBUG nova.compute.manager [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Received event network-vif-plugged-b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1444.506976] env[62816]: DEBUG oslo_concurrency.lockutils [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] Acquiring lock "0e0261fe-4376-487c-9d54-c4f37577409c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.506976] env[62816]: DEBUG oslo_concurrency.lockutils [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] Lock "0e0261fe-4376-487c-9d54-c4f37577409c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.507515] env[62816]: DEBUG oslo_concurrency.lockutils [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] Lock "0e0261fe-4376-487c-9d54-c4f37577409c-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.507515] env[62816]: DEBUG nova.compute.manager [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] No waiting events found dispatching network-vif-plugged-b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1444.507515] env[62816]: WARNING nova.compute.manager [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Received unexpected event network-vif-plugged-b2b6a990-a634-4e68-ba4c-886b856209a5 for instance with vm_state building and task_state spawning. [ 1444.507613] env[62816]: DEBUG nova.compute.manager [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Received event network-changed-b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1444.507703] env[62816]: DEBUG nova.compute.manager [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Refreshing instance network info cache due to event network-changed-b2b6a990-a634-4e68-ba4c-886b856209a5. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1444.508042] env[62816]: DEBUG oslo_concurrency.lockutils [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] Acquiring lock "refresh_cache-0e0261fe-4376-487c-9d54-c4f37577409c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.508834] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.566552] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5a397884-d5c0-436b-b6bd-96943eecd180 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.701s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1444.875838] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ae09bd-7e51-c9fe-0132-327a31acb9e3, 'name': SearchDatastore_Task, 'duration_secs': 0.034197} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.876111] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.876401] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 6767c231-2dcb-4d19-ae7c-5b026d48ed26/6767c231-2dcb-4d19-ae7c-5b026d48ed26.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1444.876609] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3ec439e-fbff-4145-9bdb-b0ba488b7188 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.883298] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1444.883298] env[62816]: value = "task-1788125" [ 1444.883298] env[62816]: _type = "Task" [ 1444.883298] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.894669] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788125, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.951313] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "refresh_cache-0e0261fe-4376-487c-9d54-c4f37577409c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.951676] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance network_info: |[{"id": "b2b6a990-a634-4e68-ba4c-886b856209a5", "address": "fa:16:3e:df:cb:24", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b6a990-a6", "ovs_interfaceid": "b2b6a990-a634-4e68-ba4c-886b856209a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1444.952315] env[62816]: DEBUG oslo_concurrency.lockutils [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] Acquired lock "refresh_cache-0e0261fe-4376-487c-9d54-c4f37577409c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.952529] env[62816]: DEBUG nova.network.neutron [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Refreshing network info cache for port b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1444.953978] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:cb:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2b6a990-a634-4e68-ba4c-886b856209a5', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1444.962476] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 
tempest-ServersAdminTestJSON-1690259284-project-member] Creating folder: Project (0f016ab6a03848ba8014647f483f0b92). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1444.965863] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a33ca65-b137-47e8-a294-caefd8c5c2d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.976292] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created folder: Project (0f016ab6a03848ba8014647f483f0b92) in parent group-v370905. [ 1444.976517] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Creating folder: Instances. Parent ref: group-v370983. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1444.977081] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-066a4ece-1378-40f3-a34e-5ab5ad5f1b6d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.986906] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created folder: Instances in parent group-v370983. [ 1444.987190] env[62816]: DEBUG oslo.service.loopingcall [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1444.987406] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1444.987638] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5323aa3f-06e5-4956-a0f0-201098ec9aea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.016288] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.016288] env[62816]: value = "task-1788128" [ 1445.016288] env[62816]: _type = "Task" [ 1445.016288] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.028009] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788128, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.069609] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1445.288176] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fcd43f4-48f5-474f-bf3e-a5fcd9194937 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.298370] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba0b51e-808e-49cf-82d7-d8eecd8cd2d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.333288] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bba692-aa38-4a1d-ae13-30e8d7c1c009 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.341738] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55b8dfd-af09-4896-b0cc-f599879c6a87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.363355] env[62816]: DEBUG nova.compute.provider_tree [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1445.396749] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788125, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.528558] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788128, 'name': CreateVM_Task, 'duration_secs': 0.394361} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.528777] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1445.529494] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.529703] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.530055] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1445.530366] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-215cb72d-5c1f-42b2-a260-df0410593a36 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.534829] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1445.534829] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f7ab21-e8e1-f793-33a1-37d3dd5ee889" [ 1445.534829] env[62816]: _type = "Task" [ 1445.534829] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.542952] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f7ab21-e8e1-f793-33a1-37d3dd5ee889, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.591677] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.859865] env[62816]: DEBUG nova.network.neutron [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Updated VIF entry in instance network info cache for port b2b6a990-a634-4e68-ba4c-886b856209a5. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1445.860402] env[62816]: DEBUG nova.network.neutron [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Updating instance_info_cache with network_info: [{"id": "b2b6a990-a634-4e68-ba4c-886b856209a5", "address": "fa:16:3e:df:cb:24", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b6a990-a6", "ovs_interfaceid": "b2b6a990-a634-4e68-ba4c-886b856209a5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.869021] env[62816]: DEBUG nova.scheduler.client.report [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1445.894801] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788125, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.516086} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.895196] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 6767c231-2dcb-4d19-ae7c-5b026d48ed26/6767c231-2dcb-4d19-ae7c-5b026d48ed26.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1445.895424] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1445.895714] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-759159e9-ecc2-448a-a96c-deed028c0299 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.905143] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1445.905143] env[62816]: value = "task-1788129" [ 1445.905143] env[62816]: _type = "Task" [ 1445.905143] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.908846] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.909160] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.909419] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1445.909647] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.909857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.912203] env[62816]: INFO nova.compute.manager [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Terminating instance [ 1445.917079] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.917819] env[62816]: DEBUG nova.compute.manager [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1445.918086] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1445.919708] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613c94ff-e03e-44e8-8af7-27f29fb8328a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.927783] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1445.928087] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1419bf34-1211-49b1-be50-5229c5930157 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.934787] env[62816]: DEBUG oslo_vmware.api [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1445.934787] env[62816]: value = "task-1788130" [ 1445.934787] env[62816]: _type = "Task" [ 1445.934787] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.942825] env[62816]: DEBUG oslo_vmware.api [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788130, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.049088] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f7ab21-e8e1-f793-33a1-37d3dd5ee889, 'name': SearchDatastore_Task, 'duration_secs': 0.008848} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.049088] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.049088] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.049088] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1446.049937] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.050392] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.051068] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0089ce9f-61c3-420d-90df-ab941abf6723 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.062029] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.062029] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1446.062029] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc56f8ab-f155-426e-a486-68aa78795171 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.068273] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1446.068273] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529c5fcb-d6ac-61dd-7e8d-7870494ce6b9" [ 1446.068273] env[62816]: _type = "Task" [ 1446.068273] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.081009] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529c5fcb-d6ac-61dd-7e8d-7870494ce6b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.363781] env[62816]: DEBUG oslo_concurrency.lockutils [req-7fdafdbb-6e89-4bd3-9bee-19f059bbb00b req-09fd2e40-586c-4435-b53f-5ddae3929056 service nova] Releasing lock "refresh_cache-0e0261fe-4376-487c-9d54-c4f37577409c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1446.374433] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.376732] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1446.380320] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.463s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.380320] env[62816]: DEBUG nova.objects.instance [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lazy-loading 'resources' on Instance uuid 927badc2-decf-49af-b2c0-d95b471272c9 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1446.416976] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148988} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.421374] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1446.422303] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7467bfb1-6ac8-4fdb-8dbc-89a578822e87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.448159] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 6767c231-2dcb-4d19-ae7c-5b026d48ed26/6767c231-2dcb-4d19-ae7c-5b026d48ed26.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1446.451935] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b3da4bc-465d-4bfd-8f6b-4b5f242d5c8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.474807] env[62816]: DEBUG oslo_vmware.api [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788130, 'name': PowerOffVM_Task, 'duration_secs': 0.19052} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.476699] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1446.476841] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1446.477448] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1446.477448] env[62816]: value = "task-1788131" [ 1446.477448] env[62816]: _type = "Task" [ 1446.477448] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.477763] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ebb6a73-d261-4f90-8d24-de8a06e802e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.491149] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.579428] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529c5fcb-d6ac-61dd-7e8d-7870494ce6b9, 'name': SearchDatastore_Task, 'duration_secs': 0.019123} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.580183] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5208a771-6b39-4efc-8d5d-2da844d5c2f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.586512] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1446.586512] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526c9afd-09bf-b4a8-05be-fe151f7e6de6" [ 1446.586512] env[62816]: _type = "Task" [ 1446.586512] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.594979] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526c9afd-09bf-b4a8-05be-fe151f7e6de6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.725727] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1446.725971] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1446.726169] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Deleting the datastore file [datastore1] 0c5c5c06-0b5e-4e11-84b5-ca76828a0565 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1446.726446] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce26d779-7677-4553-b782-c2a5e90ff719 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.732987] env[62816]: DEBUG oslo_vmware.api [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1446.732987] env[62816]: value = "task-1788133" [ 1446.732987] env[62816]: _type = "Task" [ 1446.732987] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.742857] env[62816]: DEBUG oslo_vmware.api [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788133, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.886425] env[62816]: DEBUG nova.compute.utils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.888445] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1446.888445] env[62816]: DEBUG nova.network.neutron [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.953591] env[62816]: DEBUG nova.policy [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd3262feba6b94eddbe93814c58894995', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13061618f3be488280c98f34ae12f4fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1446.993480] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788131, 'name': ReconfigVM_Task, 'duration_secs': 0.317727} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.993762] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 6767c231-2dcb-4d19-ae7c-5b026d48ed26/6767c231-2dcb-4d19-ae7c-5b026d48ed26.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1446.994618] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2a3eaf6f-6470-4f02-8882-433be3db4776 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.004030] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1447.004030] env[62816]: value = "task-1788134" [ 1447.004030] env[62816]: _type = "Task" [ 1447.004030] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.013125] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788134, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.104636] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526c9afd-09bf-b4a8-05be-fe151f7e6de6, 'name': SearchDatastore_Task, 'duration_secs': 0.017119} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.105382] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.105382] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1447.105525] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e77d3713-e4ea-4d2b-add7-8e24660f5027 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.117734] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1447.117734] env[62816]: value = "task-1788135" [ 1447.117734] env[62816]: _type = "Task" [ 1447.117734] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.131735] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.247118] env[62816]: DEBUG oslo_vmware.api [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788133, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.198161} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.247414] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1447.247634] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1447.247860] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1447.248076] env[62816]: INFO nova.compute.manager [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1447.248719] env[62816]: DEBUG oslo.service.loopingcall [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.249195] env[62816]: DEBUG nova.compute.manager [-] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1447.249328] env[62816]: DEBUG nova.network.neutron [-] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1447.370985] env[62816]: DEBUG nova.network.neutron [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Successfully created port: 36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.390595] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1447.514042] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788134, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.516759] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de230fe-eb53-4f07-804f-a5dc19c2111c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.524511] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3042b7cf-f339-4874-88ab-4ebdcaf37255 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.555810] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4a9cd0-0844-479e-b687-7e04fe7478e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.564756] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9da2989-b6a9-4c64-bf13-6bf4cdc83769 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.583416] env[62816]: DEBUG nova.compute.provider_tree [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.631275] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788135, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.756583] env[62816]: DEBUG nova.compute.manager [req-0aca9912-0c49-4585-9e2b-ad8258c161f2 req-e6a8a17d-de1f-414c-8cb2-a489a376bd1e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-vif-deleted-2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1447.756779] env[62816]: INFO nova.compute.manager [req-0aca9912-0c49-4585-9e2b-ad8258c161f2 req-e6a8a17d-de1f-414c-8cb2-a489a376bd1e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Neutron deleted interface 2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce; detaching it from the instance and deleting it from the info cache [ 1447.757071] env[62816]: DEBUG nova.network.neutron [req-0aca9912-0c49-4585-9e2b-ad8258c161f2 req-e6a8a17d-de1f-414c-8cb2-a489a376bd1e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updating instance_info_cache with network_info: [{"id": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "address": "fa:16:3e:8f:c4:4d", "network": {"id": "e7634fcd-3acc-4f56-8b7d-685211ca51cb", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1717224539", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.71", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1880df72-582c-44cb-992d-88dc6a514914", "external-id": "nsx-vlan-transportzone-808", "segmentation_id": 808, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54c5cba6-8e", "ovs_interfaceid": "54c5cba6-8e06-4d70-bb84-fc0420096ff8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "address": "fa:16:3e:d9:67:7b", "network": {"id": "838a6f09-3c90-493e-a109-2978a23603b0", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1800357631", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.157", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dabbac20-1723-40ad-9da0-e53b28073651", "external-id": "nsx-vlan-transportzone-790", "segmentation_id": 790, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27b70ae2-92", "ovs_interfaceid": "27b70ae2-92a3-40bb-b6d0-5a06e860d0ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.017714] env[62816]: DEBUG oslo_vmware.api [None 
req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788134, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.092031] env[62816]: DEBUG nova.scheduler.client.report [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1448.134524] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788135, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.259383] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2471cd21-4f8e-4339-9750-ed88b83d2ca9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.270133] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-106fc524-9ec6-4ade-8563-7fb1db1bd105 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.302502] env[62816]: DEBUG nova.compute.manager [req-0aca9912-0c49-4585-9e2b-ad8258c161f2 req-e6a8a17d-de1f-414c-8cb2-a489a376bd1e service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Detach interface failed, port_id=2ff6f344-ea63-41f8-9cc5-c6c2dd3857ce, reason: Instance 0c5c5c06-0b5e-4e11-84b5-ca76828a0565 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1448.399163] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1448.433817] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1448.434075] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1448.434776] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1448.434776] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1448.434776] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1448.434776] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1448.434987] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1448.437007] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1448.437418] 
env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1448.437700] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1448.437948] env[62816]: DEBUG nova.virt.hardware [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1448.439071] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242fc7bc-6965-4535-af35-7c943c09a3ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.450031] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98650b7-426f-4f20-8418-1c44ff7105a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.514643] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788134, 'name': Rename_Task, 'duration_secs': 1.152749} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.514962] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1448.515648] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a579ca8-c1a0-457a-8273-9a3887d497c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.522091] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1448.522091] env[62816]: value = "task-1788136" [ 1448.522091] env[62816]: _type = "Task" [ 1448.522091] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.535079] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788136, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.597296] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.218s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.600038] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 29.988s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.623123] env[62816]: DEBUG nova.network.neutron [-] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.626151] env[62816]: INFO nova.scheduler.client.report [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleted allocations for instance 927badc2-decf-49af-b2c0-d95b471272c9 [ 1448.634484] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788135, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.482926} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.635458] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1448.635458] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1448.636061] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08c95ef3-b5c2-45b4-a71b-f112ee6fdf31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.642649] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1448.642649] env[62816]: value = "task-1788137" [ 1448.642649] env[62816]: _type = "Task" [ 1448.642649] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.651932] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.995209] env[62816]: DEBUG nova.compute.manager [req-906d5fe9-36ab-415b-98f5-356198429f55 req-b9c84448-e9ac-4916-9dea-88e62732e136 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Received event network-vif-plugged-36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.995422] env[62816]: DEBUG oslo_concurrency.lockutils [req-906d5fe9-36ab-415b-98f5-356198429f55 req-b9c84448-e9ac-4916-9dea-88e62732e136 service nova] Acquiring lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.995621] env[62816]: DEBUG oslo_concurrency.lockutils [req-906d5fe9-36ab-415b-98f5-356198429f55 req-b9c84448-e9ac-4916-9dea-88e62732e136 service nova] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.996026] env[62816]: DEBUG oslo_concurrency.lockutils [req-906d5fe9-36ab-415b-98f5-356198429f55 req-b9c84448-e9ac-4916-9dea-88e62732e136 service nova] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.996194] env[62816]: DEBUG nova.compute.manager [req-906d5fe9-36ab-415b-98f5-356198429f55 req-b9c84448-e9ac-4916-9dea-88e62732e136 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] No waiting events found dispatching network-vif-plugged-36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.996526] env[62816]: WARNING nova.compute.manager [req-906d5fe9-36ab-415b-98f5-356198429f55 req-b9c84448-e9ac-4916-9dea-88e62732e136 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Received unexpected event network-vif-plugged-36672ebf-8330-4f63-8a9e-840cab593685 for instance with vm_state building and task_state spawning. [ 1449.032853] env[62816]: DEBUG oslo_vmware.api [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788136, 'name': PowerOnVM_Task, 'duration_secs': 0.476519} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.033498] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1449.033820] env[62816]: INFO nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Took 9.15 seconds to spawn the instance on the hypervisor. [ 1449.033932] env[62816]: DEBUG nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1449.034713] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da84ca85-3246-433d-a2ba-ca258e7a84bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.038117] env[62816]: DEBUG nova.network.neutron [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Successfully updated port: 36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.130631] env[62816]: INFO nova.compute.manager [-] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Took 1.88 seconds to deallocate network for instance. [ 1449.141863] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5bf5618d-b044-4d93-9b73-6876e79d0bda tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "927badc2-decf-49af-b2c0-d95b471272c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.393s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1449.151984] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063065} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.152263] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1449.153028] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397207b0-93d4-4747-a7be-72fe456e616a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.178017] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1449.178604] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48bcacef-ade9-4bb0-b21b-cec346e388ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.199507] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1449.199507] env[62816]: value = "task-1788138" [ 1449.199507] env[62816]: _type = "Task" [ 1449.199507] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.211266] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788138, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.542099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "refresh_cache-a6b06048-6cdc-497e-8c5d-b6a26d3e7557" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1449.542099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "refresh_cache-a6b06048-6cdc-497e-8c5d-b6a26d3e7557" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.542099] env[62816]: DEBUG nova.network.neutron [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.559577] env[62816]: INFO nova.compute.manager [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Took 42.70 seconds to build instance. [ 1449.617666] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Applying migration context for instance 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd as it has an incoming, in-progress migration de83f004-df93-40fc-a350-b89d170de652. Migration status is confirming {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1449.619439] env[62816]: INFO nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating resource usage from migration de83f004-df93-40fc-a350-b89d170de652 [ 1449.641801] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1449.646277] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 99bd7579-7097-41df-a8c0-e12a3863a3dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.646441] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f06102d6-be5c-40d1-ae1d-8ae8190fd0d7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1449.646571] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0b10aca0-950b-46f6-8367-5cb9ea7540c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.646691] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 11a4d835-c149-49f0-8e4f-b3f9a7f1afca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.646820] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 66745316-2735-4c49-b1a2-f9e547211761 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1449.646938] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f6ddaab3-d420-4ee4-bf75-486228826635 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.647075] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 455052cc-292a-414c-8c83-bc512c49a197 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1449.647196] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance fb84cb48-d1a1-4eec-adb8-8edc585263df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.647329] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 2bc7f973-007d-44bd-aae8-d3b62506efba is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1449.647653] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 679cd9a3-2ed6-451f-b934-ba7738913959 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.647653] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f1914aaa-1f3d-48b7-a6d2-ceea16dc786a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.647653] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Migration de83f004-df93-40fc-a350-b89d170de652 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1449.647823] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 52670f9e-0cb7-4464-be9c-7b0d8346f60f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1449.647872] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.648018] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance with task_state "deleting" is not being actively managed by this compute host but has allocations referencing this compute node (27f49c85-1bb9-4d17-a914-e2f45a5e84fa): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1449.648151] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 914b187f-b05f-49d4-bf61-d536ef61934d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1449.648270] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 42093232-a4e5-4cc3-ab1c-a0023a91e102 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.648436] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0c5c5c06-0b5e-4e11-84b5-ca76828a0565 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.648574] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance cf6ff174-1324-42bd-a77a-905b9a333c27 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.648689] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 6767c231-2dcb-4d19-ae7c-5b026d48ed26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.648799] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0e0261fe-4376-487c-9d54-c4f37577409c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.648909] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance a6b06048-6cdc-497e-8c5d-b6a26d3e7557 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1449.712609] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788138, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.818999] env[62816]: DEBUG nova.compute.manager [req-15958999-d4f4-4e0b-8eba-76b6715d65ed req-23c29e7e-5bce-409f-b5c8-b3e8cf03ada2 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-vif-deleted-54c5cba6-8e06-4d70-bb84-fc0420096ff8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1449.818999] env[62816]: DEBUG nova.compute.manager [req-15958999-d4f4-4e0b-8eba-76b6715d65ed req-23c29e7e-5bce-409f-b5c8-b3e8cf03ada2 service nova] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Received event network-vif-deleted-27b70ae2-92a3-40bb-b6d0-5a06e860d0ad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.063554] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ddf61224-99e5-47a5-ac7b-c06a3191963a tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.417s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.077925] env[62816]: DEBUG nova.network.neutron [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1450.151842] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 128bd207-a483-4b38-9fd4-4fb996ce1d0d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1450.212023] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788138, 'name': ReconfigVM_Task, 'duration_secs': 0.862801} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.212239] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1450.212967] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65b20028-fb7e-48c8-9b8c-40df5a84f647 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.220042] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1450.220042] env[62816]: value = "task-1788139" [ 1450.220042] env[62816]: _type = "Task" [ 1450.220042] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.231593] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788139, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.280352] env[62816]: DEBUG nova.network.neutron [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Updating instance_info_cache with network_info: [{"id": "36672ebf-8330-4f63-8a9e-840cab593685", "address": "fa:16:3e:f4:ac:49", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36672ebf-83", "ovs_interfaceid": "36672ebf-8330-4f63-8a9e-840cab593685", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.566879] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1450.656173] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0a1a8539-940a-4a17-9826-82736be41892 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1450.730732] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788139, 'name': Rename_Task, 'duration_secs': 0.1486} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.731066] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1450.731352] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26f33661-8253-4e70-a04b-60de7219dece {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.737279] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1450.737279] env[62816]: value = "task-1788140" [ 1450.737279] env[62816]: _type = "Task" [ 1450.737279] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.745760] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788140, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.783554] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "refresh_cache-a6b06048-6cdc-497e-8c5d-b6a26d3e7557" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.784232] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Instance network_info: |[{"id": "36672ebf-8330-4f63-8a9e-840cab593685", "address": "fa:16:3e:f4:ac:49", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36672ebf-83", "ovs_interfaceid": "36672ebf-8330-4f63-8a9e-840cab593685", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1450.784425] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None 
req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:ac:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ed91b7b-b4ec-486d-ab34-af0afb7ec691', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36672ebf-8330-4f63-8a9e-840cab593685', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1450.792365] env[62816]: DEBUG oslo.service.loopingcall [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.792571] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1450.792803] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba40ff15-5d8e-4cfa-a27c-88304b6611f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.812784] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1450.812784] env[62816]: value = "task-1788141" [ 1450.812784] env[62816]: _type = "Task" [ 1450.812784] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.820783] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788141, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.089641] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.159754] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1c3392d3-cfb0-47c6-9366-8c363ad21297 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.204731] env[62816]: DEBUG nova.compute.manager [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Received event network-changed-36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1451.205199] env[62816]: DEBUG nova.compute.manager [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Refreshing instance network info cache due to event network-changed-36672ebf-8330-4f63-8a9e-840cab593685. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1451.205511] env[62816]: DEBUG oslo_concurrency.lockutils [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] Acquiring lock "refresh_cache-a6b06048-6cdc-497e-8c5d-b6a26d3e7557" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.205657] env[62816]: DEBUG oslo_concurrency.lockutils [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] Acquired lock "refresh_cache-a6b06048-6cdc-497e-8c5d-b6a26d3e7557" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.205925] env[62816]: DEBUG nova.network.neutron [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Refreshing network info cache for port 36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.248532] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788140, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.280598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "ba6e94c9-eb58-4040-8e28-f255961e76ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.281297] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.324550] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788141, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.666921] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance e1067d45-1938-4021-b902-21a1aa57058a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1451.750442] env[62816]: DEBUG oslo_vmware.api [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788140, 'name': PowerOnVM_Task, 'duration_secs': 0.759945} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.750777] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1451.751051] env[62816]: INFO nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1451.752380] env[62816]: DEBUG nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1451.752380] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8c047c-4804-41e8-86bc-dec986dd7989 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.824122] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788141, 'name': CreateVM_Task, 'duration_secs': 0.668697} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.824122] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1451.824779] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.824944] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.825284] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.825544] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eaa0792-eeb3-44aa-b55d-398058b8f3c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.830133] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1451.830133] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52660392-7fc5-130b-da00-05de5913fbc1" [ 1451.830133] env[62816]: _type = "Task" [ 1451.830133] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.838124] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52660392-7fc5-130b-da00-05de5913fbc1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.873966] env[62816]: DEBUG nova.compute.manager [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-changed-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1451.874172] env[62816]: DEBUG nova.compute.manager [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Refreshing instance network info cache due to event network-changed-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1451.874382] env[62816]: DEBUG oslo_concurrency.lockutils [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] Acquiring lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.874527] env[62816]: DEBUG oslo_concurrency.lockutils [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] Acquired lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.874780] env[62816]: DEBUG nova.network.neutron [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Refreshing network info cache for port 51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.989612] env[62816]: DEBUG nova.network.neutron [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Updated VIF entry in instance network info cache for port 36672ebf-8330-4f63-8a9e-840cab593685. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1451.990055] env[62816]: DEBUG nova.network.neutron [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Updating instance_info_cache with network_info: [{"id": "36672ebf-8330-4f63-8a9e-840cab593685", "address": "fa:16:3e:f4:ac:49", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36672ebf-83", "ovs_interfaceid": "36672ebf-8330-4f63-8a9e-840cab593685", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.170363] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1452.271311] env[62816]: INFO nova.compute.manager [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Took 44.76 seconds to build instance. [ 1452.342054] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52660392-7fc5-130b-da00-05de5913fbc1, 'name': SearchDatastore_Task, 'duration_secs': 0.041074} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.342581] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.342960] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.345039] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.345039] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.345039] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.345039] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a79d78b4-6f53-4341-94cd-8aba58187406 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.354788] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1452.355207] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.356473] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cac55be2-a248-41c2-a908-3dbaf89c82fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.362691] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1452.362691] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52170df9-c80f-4cc0-18a4-cd335f6f1274" [ 1452.362691] env[62816]: _type = "Task" [ 1452.362691] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.371744] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52170df9-c80f-4cc0-18a4-cd335f6f1274, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.492554] env[62816]: DEBUG oslo_concurrency.lockutils [req-97d7b639-e54b-4a71-ae6d-9de8f2640746 req-bbeadeb6-440b-4404-8a67-a57c4876864f service nova] Releasing lock "refresh_cache-a6b06048-6cdc-497e-8c5d-b6a26d3e7557" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.663407] env[62816]: DEBUG nova.network.neutron [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updated VIF entry in instance network info cache for port 51d24096-dc5e-4a89-a26a-e0cf4eb85e6a. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1452.663407] env[62816]: DEBUG nova.network.neutron [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.673767] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1452.774457] env[62816]: DEBUG oslo_concurrency.lockutils [None req-435c1047-d240-48d9-a726-54659fd689d3 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0e0261fe-4376-487c-9d54-c4f37577409c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.089s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.874680] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52170df9-c80f-4cc0-18a4-cd335f6f1274, 'name': SearchDatastore_Task, 'duration_secs': 0.011295} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.875851] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebdbf81d-5d9e-423e-8398-2bc726664d30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.881194] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1452.881194] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52df42af-5360-3c7c-51ce-e8d2cde92914" [ 1452.881194] env[62816]: _type = "Task" [ 1452.881194] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.889815] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52df42af-5360-3c7c-51ce-e8d2cde92914, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.165707] env[62816]: DEBUG oslo_concurrency.lockutils [req-ec6d4c21-42d4-47cf-a1ba-33caab131651 req-c62c9382-5f7c-4a4c-956f-b89f523993e7 service nova] Releasing lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.175963] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c6dc008c-6336-4271-9635-a7e0652138e0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1453.278135] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1453.393533] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52df42af-5360-3c7c-51ce-e8d2cde92914, 'name': SearchDatastore_Task, 'duration_secs': 0.009439} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.393824] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.394095] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a6b06048-6cdc-497e-8c5d-b6a26d3e7557/a6b06048-6cdc-497e-8c5d-b6a26d3e7557.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1453.394370] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33eb5bc2-a765-4763-8c22-b991056bb362 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.401589] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1453.401589] env[62816]: value = "task-1788142" [ 1453.401589] env[62816]: _type = "Task" [ 1453.401589] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.410192] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788142, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.678600] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b409568f-6e04-4218-8a7b-1bbf785115c3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1453.801070] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1453.912733] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788142, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.181902] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance d16a99df-f092-4d56-9730-852883bbdb70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1454.416017] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788142, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.523166} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.416017] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a6b06048-6cdc-497e-8c5d-b6a26d3e7557/a6b06048-6cdc-497e-8c5d-b6a26d3e7557.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1454.416017] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1454.416017] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-081ab400-ee2e-444c-bc37-a9e4c9f9abf4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.420823] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1454.420823] env[62816]: value = "task-1788143" [ 1454.420823] env[62816]: _type = "Task" [ 1454.420823] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.431145] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788143, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.685050] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1454.930549] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788143, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.179778} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.930839] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.931629] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d1dc0e-e0bd-4e69-bd87-2ff65d3a42c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.954033] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] a6b06048-6cdc-497e-8c5d-b6a26d3e7557/a6b06048-6cdc-497e-8c5d-b6a26d3e7557.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.954364] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9ef88c5-65c2-43c4-bfb0-ce5babee9d8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.975182] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1454.975182] env[62816]: value = "task-1788144" [ 1454.975182] env[62816]: _type = "Task" [ 1454.975182] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.984946] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788144, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.187943] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance afd02433-0912-44ef-8e0e-71d6ee8fbb41 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1455.486433] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788144, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.691120] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance e003e41d-93e8-4258-b8ca-3c2420b73df0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1455.989357] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788144, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.197743] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 946dad01-c012-457d-8bfe-6395ff0aaedf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1456.491160] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788144, 'name': ReconfigVM_Task, 'duration_secs': 1.042928} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.491160] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Reconfigured VM instance instance-0000001a to attach disk [datastore1] a6b06048-6cdc-497e-8c5d-b6a26d3e7557/a6b06048-6cdc-497e-8c5d-b6a26d3e7557.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1456.491160] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e422a37-e67f-499c-8bf1-2f4d43f99b64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.500161] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1456.500161] env[62816]: value = "task-1788145" [ 1456.500161] env[62816]: _type = "Task" [ 1456.500161] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.511599] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788145, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.701938] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 48b74d52-e764-4d14-b372-fc34872205dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1456.702292] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1456.702610] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3520MB phys_disk=200GB used_disk=14GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1457.016257] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788145, 'name': Rename_Task, 'duration_secs': 0.139791} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.016257] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1457.016257] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b0d2b2d-66fa-4913-9eeb-c5ed278167ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.022573] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1457.022573] env[62816]: value = "task-1788146" [ 1457.022573] env[62816]: _type = "Task" [ 1457.022573] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.034673] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788146, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.287721] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec442f5e-946a-46e7-b4c8-405d1074f0c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.295710] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f3fba7-03cb-4bf2-894f-706902f554c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.329040] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8eda84-126d-4536-9bcf-561049593d8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.338109] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c577afc5-cf51-4b13-8b62-cf89421e2d63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.351828] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.535721] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788146, 'name': PowerOnVM_Task} progress is 81%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.860501] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1458.034533] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788146, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.123816] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "0dbf907f-0313-435c-a8be-19f7e48ded76" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.124069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.367844] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1458.368146] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.768s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.368428] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.022s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.369883] env[62816]: INFO nova.compute.claims [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1458.376086] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.533331] env[62816]: DEBUG oslo_vmware.api [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788146, 'name': PowerOnVM_Task, 'duration_secs': 1.073595} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.533607] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1458.533812] env[62816]: INFO nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Took 10.13 seconds to spawn the instance on the hypervisor. [ 1458.533988] env[62816]: DEBUG nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1458.534773] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33136e01-074c-4cf8-94fe-4e613d5ca383 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.962606] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.963292] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.963292] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1458.963292] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1459.058523] env[62816]: INFO nova.compute.manager [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Took 45.20 seconds to build instance. [ 1459.470584] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Skipping network cache update for instance because it is being deleted. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1459.470584] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Skipping network cache update for instance because it is Building. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1459.498678] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.498857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.499014] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1459.499242] env[62816]: DEBUG nova.objects.instance [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lazy-loading 'info_cache' on Instance uuid 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1459.561059] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e637c2-b948-4f2b-9112-a41ddf209cdb tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.377s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.828616] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4200ce0-cc17-4bad-85c3-de250f0072dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.836385] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024f9ba6-6859-4bdf-94d2-780e2049b349 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.866109] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1010c13-1b1c-415a-988e-58a3aa50ebb0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.873572] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc6d202-ce3f-4727-9391-19a560d491ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.888136] env[62816]: DEBUG nova.compute.provider_tree [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1460.065428] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1460.391637] env[62816]: DEBUG nova.scheduler.client.report [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1460.588914] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.896978] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.897563] env[62816]: DEBUG nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1460.900203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.240s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.900949] env[62816]: DEBUG nova.objects.instance [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1461.061224] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1461.061520] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1461.061729] env[62816]: DEBUG nova.compute.manager [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1461.062802] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba81346a-84e2-46f4-a35a-c3f77776fdaf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.069980] env[62816]: DEBUG nova.compute.manager [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1461.070630] env[62816]: DEBUG nova.objects.instance [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lazy-loading 'flavor' on Instance uuid 42093232-a4e5-4cc3-ab1c-a0023a91e102 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1461.233705] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [{"id": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "address": "fa:16:3e:d9:3d:cd", 
"network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.221", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc37f87b0-74", "ovs_interfaceid": "c37f87b0-7404-4bad-89e7-5ebbccb43aad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.405786] env[62816]: DEBUG nova.compute.utils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1461.410290] env[62816]: DEBUG nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Not allocating networking since 'none' was specified. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1461.577081] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1461.577197] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a4871ce5-4b6b-480d-a717-4af5a757736c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.586028] env[62816]: DEBUG oslo_vmware.api [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1461.586028] env[62816]: value = "task-1788147" [ 1461.586028] env[62816]: _type = "Task" [ 1461.586028] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.593335] env[62816]: DEBUG oslo_vmware.api [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788147, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.737295] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.737295] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1461.737295] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.737678] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.737844] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.737909] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.738101] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.738301] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.738435] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1461.738581] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.911722] env[62816]: DEBUG nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1461.915459] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0dfcf21a-e5f5-482b-a913-aac0bdf28893 tempest-ServersAdmin275Test-79620773 tempest-ServersAdmin275Test-79620773-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.917389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 38.982s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.095131] env[62816]: DEBUG oslo_vmware.api [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788147, 'name': PowerOffVM_Task, 'duration_secs': 0.20777} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.095415] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1462.095590] env[62816]: DEBUG nova.compute.manager [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1462.096330] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959c53a6-a175-4be0-87f2-c8a3bb1ac44c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.242315] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.608154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b662149c-947d-4795-b55e-a6e4f78ae99c tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1462.838972] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c421a5-d640-4d65-bfbb-4b6977a9e540 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.846889] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2babc1a-d036-41e3-8edf-b301c4808c71 
{{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.877971] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fdc160-9f7d-434f-a25f-eacec2d5065d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.885786] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75cf93f-5fc1-4f0d-bc05-b7a512de36a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.900075] env[62816]: DEBUG nova.compute.provider_tree [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.924614] env[62816]: DEBUG nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1462.949518] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1462.949775] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1462.949943] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1462.950146] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1462.950293] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 
tempest-ServersAaction247Test-1443363167-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1462.950432] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1462.950680] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1462.950844] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1462.951026] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1462.951260] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1462.951487] env[62816]: DEBUG nova.virt.hardware [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1462.952380] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6a7f63-4024-40ac-a2be-1d871b47faaa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.960457] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0dabef-cce3-4b89-9cae-53605007f529 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.974403] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1462.979943] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Creating folder: Project (a575b969e16b40718491f724ea2831e6). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1462.980605] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2888d5b-d031-47ee-a8d9-30abcce92a85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.991767] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Created folder: Project (a575b969e16b40718491f724ea2831e6) in parent group-v370905. [ 1462.991963] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Creating folder: Instances. Parent ref: group-v370987. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1462.992215] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba30a54d-d08d-49e6-a8c0-0bdfab863d54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.000844] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Created folder: Instances in parent group-v370987. [ 1463.001059] env[62816]: DEBUG oslo.service.loopingcall [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1463.001307] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1463.001437] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdb35dcf-7bb0-40aa-9b3e-c2f7feabed5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.019146] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1463.019146] env[62816]: value = "task-1788150" [ 1463.019146] env[62816]: _type = "Task" [ 1463.019146] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.026241] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788150, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.403805] env[62816]: DEBUG nova.scheduler.client.report [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1463.518962] env[62816]: DEBUG nova.objects.instance [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lazy-loading 'flavor' on Instance uuid 42093232-a4e5-4cc3-ab1c-a0023a91e102 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1463.529583] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788150, 'name': CreateVM_Task, 'duration_secs': 0.275906} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.529754] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1463.530184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1463.530352] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1463.530691] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1463.531501] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b7f28a9-865b-495b-a292-2f59a52e9c4c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.536703] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1463.536703] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524f24e7-de0b-718b-65d9-12ef8f1e1876" [ 1463.536703] env[62816]: 
_type = "Task" [ 1463.536703] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.543745] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524f24e7-de0b-718b-65d9-12ef8f1e1876, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.024648] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.024865] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquired lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.025078] env[62816]: DEBUG nova.network.neutron [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1464.025272] env[62816]: DEBUG nova.objects.instance [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lazy-loading 'info_cache' on Instance uuid 42093232-a4e5-4cc3-ab1c-a0023a91e102 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1464.047560] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524f24e7-de0b-718b-65d9-12ef8f1e1876, 'name': SearchDatastore_Task, 'duration_secs': 0.009168} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.047839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1464.048079] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1464.048306] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1464.048449] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1464.048622] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1464.049117] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-249b8e62-960a-4d12-bff0-df78226021c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.057631] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1464.057631] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1464.057755] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-472be69a-ec74-45c5-82f8-4dfe10516de0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.062426] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1464.062426] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5200dc3d-b80a-b9db-2d80-0522fb48ba82" [ 1464.062426] env[62816]: _type = "Task" [ 1464.062426] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.070780] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5200dc3d-b80a-b9db-2d80-0522fb48ba82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.415370] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.499s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.418224] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.158s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.418418] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.421045] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.128s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.421045] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.422558] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.420s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.422745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.424253] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.379s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1464.425633] env[62816]: INFO nova.compute.claims [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1464.455323] env[62816]: INFO nova.scheduler.client.report [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted allocations for instance 455052cc-292a-414c-8c83-bc512c49a197 [ 1464.456986] env[62816]: INFO nova.scheduler.client.report [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Deleted allocations for instance 2bc7f973-007d-44bd-aae8-d3b62506efba [ 1464.491367] env[62816]: INFO nova.scheduler.client.report [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Deleted allocations for instance 66745316-2735-4c49-b1a2-f9e547211761 [ 1464.528447] env[62816]: DEBUG nova.objects.base [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Object Instance<42093232-a4e5-4cc3-ab1c-a0023a91e102> lazy-loaded attributes: flavor,info_cache {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1464.571889] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5200dc3d-b80a-b9db-2d80-0522fb48ba82, 'name': SearchDatastore_Task, 'duration_secs': 0.00812} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1464.572663] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25abdc95-271f-4f20-ba8e-dc840fd67f4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.577939] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1464.577939] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fdc77a-da9f-4c6a-b82b-3e0f16fa473f" [ 1464.577939] env[62816]: _type = "Task" [ 1464.577939] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1464.585834] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fdc77a-da9f-4c6a-b82b-3e0f16fa473f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.973898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70edbe53-33a8-4150-b256-8c6705cc0d74 tempest-ServerDiagnosticsV248Test-1606915982 tempest-ServerDiagnosticsV248Test-1606915982-project-member] Lock "2bc7f973-007d-44bd-aae8-d3b62506efba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.814s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.984167] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1f472e1-d402-43e2-a624-3ff8b5f54bd3 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "455052cc-292a-414c-8c83-bc512c49a197" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.244s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.990384] env[62816]: INFO nova.scheduler.client.report [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleted allocation for migration de83f004-df93-40fc-a350-b89d170de652 [ 1464.998577] env[62816]: DEBUG oslo_concurrency.lockutils [None req-651c5993-ce4a-4f0e-83de-de7e12869e00 tempest-ServersAdmin275Test-531686158 tempest-ServersAdmin275Test-531686158-project-member] Lock "66745316-2735-4c49-b1a2-f9e547211761" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.474s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.088775] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fdc77a-da9f-4c6a-b82b-3e0f16fa473f, 'name': SearchDatastore_Task, 'duration_secs': 0.010947} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1465.089065] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1465.089326] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 128bd207-a483-4b38-9fd4-4fb996ce1d0d/128bd207-a483-4b38-9fd4-4fb996ce1d0d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1465.089613] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76953c7d-9636-4243-ab03-00920ef4a232 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.100314] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1465.100314] env[62816]: value = "task-1788151" [ 1465.100314] env[62816]: _type = "Task" [ 1465.100314] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1465.108733] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788151, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.487530] env[62816]: DEBUG nova.network.neutron [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Updating instance_info_cache with network_info: [{"id": "cd98f4df-d678-4280-8111-86d76a117d36", "address": "fa:16:3e:92:19:df", "network": {"id": "794925ac-c8b1-4374-bc73-431be95b8505", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1309057269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13061618f3be488280c98f34ae12f4fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ed91b7b-b4ec-486d-ab34-af0afb7ec691", "external-id": "nsx-vlan-transportzone-75", "segmentation_id": 75, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcd98f4df-d6", "ovs_interfaceid": "cd98f4df-d678-4280-8111-86d76a117d36", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1465.498526] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d70812fd-75c5-4302-8a3f-c46233432661 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 46.159s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.614347] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788151, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.933793] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a1c695-7050-42ed-9369-1080f85311cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.941785] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60978729-112a-45f1-bc8b-44e3aa8babdd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.972749] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f92e21-9320-471d-8cc5-894825c2f270 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.983016] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452be2c7-665d-461b-8095-17c0415e4035 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.994912] env[62816]: DEBUG nova.compute.provider_tree [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1465.996784] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Releasing lock "refresh_cache-42093232-a4e5-4cc3-ab1c-a0023a91e102" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1466.111828] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788151, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521487} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.112160] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 128bd207-a483-4b38-9fd4-4fb996ce1d0d/128bd207-a483-4b38-9fd4-4fb996ce1d0d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1466.112336] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1466.112619] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f280e40-e665-4656-beb3-ff0471245d12 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.119669] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1466.119669] env[62816]: value = "task-1788152" [ 1466.119669] env[62816]: _type = "Task" [ 1466.119669] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.128556] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788152, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.499210] env[62816]: DEBUG nova.scheduler.client.report [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1466.503162] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.503452] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2f7b90d-f7ca-462a-a2ae-cdc63bbeb289 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.512286] env[62816]: DEBUG oslo_vmware.api [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1466.512286] env[62816]: value = "task-1788153" [ 1466.512286] env[62816]: _type = "Task" [ 1466.512286] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.529971] env[62816]: DEBUG oslo_vmware.api [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788153, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.635813] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788152, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063245} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.635813] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1466.636918] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edcfe9f3-1c04-4810-9003-08536a6eebca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.657343] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] 128bd207-a483-4b38-9fd4-4fb996ce1d0d/128bd207-a483-4b38-9fd4-4fb996ce1d0d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.659101] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-39843868-3c20-4111-943c-8d987dca7493 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.679101] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1466.679101] env[62816]: value = "task-1788154" [ 1466.679101] env[62816]: _type = "Task" [ 1466.679101] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.687126] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788154, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.007604] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.583s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.008072] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1467.010797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.875s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.012352] env[62816]: INFO nova.compute.claims [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1467.025847] env[62816]: DEBUG oslo_vmware.api [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788153, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.190625] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788154, 'name': ReconfigVM_Task, 'duration_secs': 0.294472} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.191173] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Reconfigured VM instance instance-0000001b to attach disk [datastore1] 128bd207-a483-4b38-9fd4-4fb996ce1d0d/128bd207-a483-4b38-9fd4-4fb996ce1d0d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1467.191968] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f32f16d-44a1-4017-90cf-78de87c0af9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.199574] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1467.199574] env[62816]: value = "task-1788155" [ 1467.199574] env[62816]: _type = "Task" [ 1467.199574] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.207982] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788155, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.408468] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "9bda24c6-f950-47ff-ad3c-ff745291870c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.408813] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.516805] env[62816]: DEBUG nova.compute.utils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1467.518272] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1467.518445] env[62816]: DEBUG nova.network.neutron [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1467.532180] env[62816]: DEBUG oslo_vmware.api [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788153, 'name': PowerOnVM_Task, 'duration_secs': 0.726308} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.532484] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.532699] env[62816]: DEBUG nova.compute.manager [None req-dc53ba7a-c080-427f-a660-9f6926ce4b3f tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1467.533503] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f313f4f4-e327-4144-8855-9f805ef29daf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.566205] env[62816]: DEBUG nova.policy [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21ed3abad90741799db9f998a15c7787', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f016ab6a03848ba8014647f483f0b92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1467.712289] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788155, 'name': Rename_Task, 'duration_secs': 0.145284} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.712617] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1467.712874] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea1e2cdb-fff8-4dcf-bf7b-2bc68be7a319 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.719768] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1467.719768] env[62816]: value = "task-1788156" [ 1467.719768] env[62816]: _type = "Task" [ 1467.719768] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.727962] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788156, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.936651] env[62816]: DEBUG nova.network.neutron [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Successfully created port: c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1468.024752] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1468.232799] env[62816]: DEBUG oslo_vmware.api [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788156, 'name': PowerOnVM_Task, 'duration_secs': 0.447011} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.232799] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1468.233018] env[62816]: INFO nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Took 5.31 seconds to spawn the instance on the hypervisor. 
[ 1468.233231] env[62816]: DEBUG nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1468.234096] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a08bb08-4224-4c60-85ab-6892a1d66e5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.416814] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.417481] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.608016] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66fa255-35ea-47a4-89e8-3dbb0bd58a19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.616822] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee56341-758e-496b-b212-f35f644ce075 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.648839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4142b3c-f5cf-4e31-99ca-401950457be8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.656839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bf99bb-38f3-459d-a906-ea6a316196bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.671858] env[62816]: DEBUG nova.compute.provider_tree [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1468.755332] env[62816]: INFO nova.compute.manager [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 
tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Took 47.43 seconds to build instance. [ 1469.039927] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1469.071777] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1469.072108] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1469.072277] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1469.072465] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1469.072642] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1469.072800] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1469.073020] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1469.073187] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1469.073354] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1469.073514] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1469.073713] env[62816]: DEBUG nova.virt.hardware [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1469.074595] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3e2ef1-045c-4079-960c-6206b11e0f06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.082908] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188fa8e5-b9a5-4a54-ab0f-26592a5bf6a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.194651] env[62816]: ERROR nova.scheduler.client.report [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [req-22fb3f98-ab51-41ab-b9f1-c272875ca692] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-22fb3f98-ab51-41ab-b9f1-c272875ca692"}]} [ 1469.210698] env[62816]: DEBUG nova.scheduler.client.report [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1469.225881] env[62816]: DEBUG nova.scheduler.client.report [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1469.226164] env[62816]: DEBUG nova.compute.provider_tree [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1469.238718] env[62816]: DEBUG nova.scheduler.client.report [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1469.256585] env[62816]: DEBUG nova.scheduler.client.report [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1469.258820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a3811493-eea2-42ea-8deb-c51aa48e9274 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.684s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.634551] env[62816]: DEBUG nova.compute.manager [None req-3a5df23a-d770-4954-904e-96d29905b0d4 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1469.634860] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0108d568-4634-46df-99b0-5d12fa3caeed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.697772] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd91b3ea-1281-40a8-a95e-f47ffe0dd234 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.706117] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df57bd1-2506-4402-b000-ffcf671a92e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.743162] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81610d2c-8005-47e7-b8cb-8c15507ee91c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.745287] env[62816]: DEBUG nova.compute.manager [req-c7e6cb1f-5c6b-4b6d-a931-8392259131a0 req-fcdb87f5-41fc-422f-be5d-7406962e0c56 service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Received event network-vif-plugged-c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.745287] env[62816]: DEBUG oslo_concurrency.lockutils [req-c7e6cb1f-5c6b-4b6d-a931-8392259131a0 req-fcdb87f5-41fc-422f-be5d-7406962e0c56 service nova] Acquiring lock "0a1a8539-940a-4a17-9826-82736be41892-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.745287] env[62816]: DEBUG oslo_concurrency.lockutils [req-c7e6cb1f-5c6b-4b6d-a931-8392259131a0 req-fcdb87f5-41fc-422f-be5d-7406962e0c56 service nova] Lock "0a1a8539-940a-4a17-9826-82736be41892-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.745457] env[62816]: DEBUG oslo_concurrency.lockutils [req-c7e6cb1f-5c6b-4b6d-a931-8392259131a0 req-fcdb87f5-41fc-422f-be5d-7406962e0c56 service nova] Lock "0a1a8539-940a-4a17-9826-82736be41892-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.745671] env[62816]: DEBUG nova.compute.manager [req-c7e6cb1f-5c6b-4b6d-a931-8392259131a0 req-fcdb87f5-41fc-422f-be5d-7406962e0c56 service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] No waiting events found dispatching network-vif-plugged-c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1469.745842] env[62816]: WARNING nova.compute.manager 
[req-c7e6cb1f-5c6b-4b6d-a931-8392259131a0 req-fcdb87f5-41fc-422f-be5d-7406962e0c56 service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Received unexpected event network-vif-plugged-c6e6822e-53bd-4c81-b715-3c3d6bef6f45 for instance with vm_state building and task_state spawning. [ 1469.752151] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4f4d6f-2360-404e-b421-5409a94da3d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.757509] env[62816]: DEBUG nova.network.neutron [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Successfully updated port: c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1469.766454] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.769345] env[62816]: DEBUG nova.compute.provider_tree [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1469.772443] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "refresh_cache-0a1a8539-940a-4a17-9826-82736be41892" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.772443] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "refresh_cache-0a1a8539-940a-4a17-9826-82736be41892" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.772443] env[62816]: DEBUG nova.network.neutron [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1469.810777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.811114] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.811342] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.811536] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1469.811707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.814046] env[62816]: INFO nova.compute.manager [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Terminating instance [ 1469.815631] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "refresh_cache-128bd207-a483-4b38-9fd4-4fb996ce1d0d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.815858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquired lock "refresh_cache-128bd207-a483-4b38-9fd4-4fb996ce1d0d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.815963] env[62816]: DEBUG nova.network.neutron [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1470.147099] env[62816]: INFO nova.compute.manager [None req-3a5df23a-d770-4954-904e-96d29905b0d4 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 
128bd207-a483-4b38-9fd4-4fb996ce1d0d] instance snapshotting [ 1470.148085] env[62816]: DEBUG nova.objects.instance [None req-3a5df23a-d770-4954-904e-96d29905b0d4 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lazy-loading 'flavor' on Instance uuid 128bd207-a483-4b38-9fd4-4fb996ce1d0d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1470.293337] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.307773] env[62816]: DEBUG nova.scheduler.client.report [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 58 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1470.307982] env[62816]: DEBUG nova.compute.provider_tree [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 58 to 59 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1470.308186] env[62816]: DEBUG nova.compute.provider_tree [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1470.313150] env[62816]: DEBUG nova.network.neutron [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1470.338070] env[62816]: DEBUG nova.network.neutron [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1470.387339] env[62816]: DEBUG nova.network.neutron [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.454484] env[62816]: DEBUG nova.network.neutron [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Updating instance_info_cache with network_info: [{"id": "c6e6822e-53bd-4c81-b715-3c3d6bef6f45", "address": "fa:16:3e:1b:6e:2c", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e6822e-53", "ovs_interfaceid": "c6e6822e-53bd-4c81-b715-3c3d6bef6f45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.617053] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.618172] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.618172] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.618172] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 
tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.618172] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.620530] env[62816]: INFO nova.compute.manager [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Terminating instance [ 1470.622309] env[62816]: DEBUG nova.compute.manager [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1470.622573] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1470.623395] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643f777a-7f13-4139-9b1b-bbd36b2f3089 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.631729] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1470.631977] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7decb5c-0034-47c2-9f03-86886a4ee9a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.639117] env[62816]: DEBUG oslo_vmware.api [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1470.639117] env[62816]: value = "task-1788157" [ 1470.639117] env[62816]: _type = "Task" [ 1470.639117] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.647153] env[62816]: DEBUG oslo_vmware.api [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788157, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.654197] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de300455-bc71-4775-9156-fcdde762d22e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.669486] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d61ed6-de1b-42c9-8871-06ccbfa8f0be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.818358] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.807s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.818981] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1470.822379] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.362s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.822658] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1470.824833] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.038s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.826410] env[62816]: INFO nova.compute.claims [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1470.853478] env[62816]: INFO nova.scheduler.client.report [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Deleted allocations for instance 52670f9e-0cb7-4464-be9c-7b0d8346f60f [ 1470.891454] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 
tempest-ServersAaction247Test-1443363167-project-member] Releasing lock "refresh_cache-128bd207-a483-4b38-9fd4-4fb996ce1d0d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.891917] env[62816]: DEBUG nova.compute.manager [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1470.892134] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1470.893080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c027adb8-b404-4f1d-9e64-f4c96a098439 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.902203] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1470.902448] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43ca26bb-529e-4782-a631-21037b96a489 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.909427] env[62816]: DEBUG oslo_vmware.api [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1470.909427] env[62816]: value = "task-1788158" [ 1470.909427] env[62816]: _type = "Task" [ 1470.909427] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.917442] env[62816]: DEBUG oslo_vmware.api [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788158, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.956774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "refresh_cache-0a1a8539-940a-4a17-9826-82736be41892" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.957149] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Instance network_info: |[{"id": "c6e6822e-53bd-4c81-b715-3c3d6bef6f45", "address": "fa:16:3e:1b:6e:2c", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e6822e-53", "ovs_interfaceid": "c6e6822e-53bd-4c81-b715-3c3d6bef6f45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1470.957575] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:6e:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6e6822e-53bd-4c81-b715-3c3d6bef6f45', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1470.965972] env[62816]: DEBUG oslo.service.loopingcall [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1470.966271] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1470.966956] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-12f4ecf6-8e42-400b-bceb-819064a6d7b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.989108] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1470.989108] env[62816]: value = "task-1788159" [ 1470.989108] env[62816]: _type = "Task" [ 1470.989108] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.996557] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788159, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.148760] env[62816]: DEBUG oslo_vmware.api [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788157, 'name': PowerOffVM_Task, 'duration_secs': 0.205971} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.149109] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1471.149325] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1471.149575] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1527a40-b5de-4e50-a7a4-20d6832bb3d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.180233] env[62816]: DEBUG nova.compute.manager [None req-3a5df23a-d770-4954-904e-96d29905b0d4 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Instance disappeared during snapshot {{(pid=62816) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1471.219438] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1471.219671] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Deleting contents of the VM from datastore 
datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1471.219856] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleting the datastore file [datastore1] a6b06048-6cdc-497e-8c5d-b6a26d3e7557 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1471.220206] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcbe8125-55ef-4ee2-ab3b-f41082548c9b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.228418] env[62816]: DEBUG oslo_vmware.api [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1471.228418] env[62816]: value = "task-1788161" [ 1471.228418] env[62816]: _type = "Task" [ 1471.228418] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.236177] env[62816]: DEBUG oslo_vmware.api [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.329226] env[62816]: DEBUG nova.compute.manager [None req-3a5df23a-d770-4954-904e-96d29905b0d4 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Found 0 images (rotation: 2) {{(pid=62816) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1471.331487] env[62816]: DEBUG nova.compute.utils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1471.335183] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1471.335183] env[62816]: DEBUG nova.network.neutron [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1471.363111] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a69de1cd-c000-4531-804d-fe08c769f721 tempest-ServerAddressesNegativeTestJSON-1283176955 tempest-ServerAddressesNegativeTestJSON-1283176955-project-member] Lock "52670f9e-0cb7-4464-be9c-7b0d8346f60f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.025s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.418393] env[62816]: DEBUG oslo_vmware.api [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788158, 'name': PowerOffVM_Task, 'duration_secs': 0.109862} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.418658] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1471.418820] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1471.419073] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee4e6a7d-62b4-45ff-bf10-70c396b14ad2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.421659] env[62816]: DEBUG nova.policy [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2f99144f3364fe2b298fc4b579d6be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e4a92683e3f457bb157966a92b48577', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1471.442427] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1471.443105] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 
tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1471.443105] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Deleting the datastore file [datastore1] 128bd207-a483-4b38-9fd4-4fb996ce1d0d {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1471.443105] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a434ee11-57fa-4b28-8b07-19551661afa1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.449369] env[62816]: DEBUG oslo_vmware.api [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for the task: (returnval){ [ 1471.449369] env[62816]: value = "task-1788163" [ 1471.449369] env[62816]: _type = "Task" [ 1471.449369] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.457573] env[62816]: DEBUG oslo_vmware.api [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.498894] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788159, 'name': CreateVM_Task, 'duration_secs': 0.356042} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.498894] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1471.499501] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.499721] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.500045] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1471.500293] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f100f6b-5a90-40b4-9da1-1e68af47754d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.505164] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1471.505164] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527284fb-9c8f-9ed5-aec9-f8143887856b" [ 1471.505164] env[62816]: _type = "Task" [ 1471.505164] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.514113] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527284fb-9c8f-9ed5-aec9-f8143887856b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.738212] env[62816]: DEBUG oslo_vmware.api [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17072} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.738518] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1471.738678] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1471.738850] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1471.739027] env[62816]: INFO nova.compute.manager [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1471.739335] env[62816]: DEBUG oslo.service.loopingcall [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1471.741017] env[62816]: DEBUG nova.compute.manager [-] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1471.741017] env[62816]: DEBUG nova.network.neutron [-] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1471.819149] env[62816]: DEBUG nova.compute.manager [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Received event network-changed-c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.819149] env[62816]: DEBUG nova.compute.manager [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Refreshing instance network info cache due to event network-changed-c6e6822e-53bd-4c81-b715-3c3d6bef6f45. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1471.819393] env[62816]: DEBUG oslo_concurrency.lockutils [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] Acquiring lock "refresh_cache-0a1a8539-940a-4a17-9826-82736be41892" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.819511] env[62816]: DEBUG oslo_concurrency.lockutils [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] Acquired lock "refresh_cache-0a1a8539-940a-4a17-9826-82736be41892" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.819926] env[62816]: DEBUG nova.network.neutron [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Refreshing network info cache for port c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1471.837847] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1471.965553] env[62816]: DEBUG oslo_vmware.api [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Task: {'id': task-1788163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098365} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.966161] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1471.966377] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1471.966559] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1471.966733] env[62816]: INFO nova.compute.manager [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Took 1.07 seconds to destroy the instance on the hypervisor. 
[ 1471.967010] env[62816]: DEBUG oslo.service.loopingcall [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1471.967229] env[62816]: DEBUG nova.compute.manager [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1471.967325] env[62816]: DEBUG nova.network.neutron [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1471.997800] env[62816]: DEBUG nova.network.neutron [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1472.021832] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527284fb-9c8f-9ed5-aec9-f8143887856b, 'name': SearchDatastore_Task, 'duration_secs': 0.01038} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.024831] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.025128] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1472.025538] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.025756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.025984] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1472.026800] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3dca1a2-9c23-4efe-8bf8-f607339df0f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.036406] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1472.036630] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1472.037415] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ecb3a098-cf01-4460-bc70-1c58d87204dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.045585] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1472.045585] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52790182-f39b-38a6-f55c-a776fc377893" [ 1472.045585] env[62816]: _type = "Task" [ 1472.045585] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.056929] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52790182-f39b-38a6-f55c-a776fc377893, 'name': SearchDatastore_Task, 'duration_secs': 0.009472} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.057761] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b7cb4de-f342-42aa-8a64-c782a0ac3a49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.063810] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1472.063810] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5212c8dc-5e32-f942-2301-7d7148afbea8" [ 1472.063810] env[62816]: _type = "Task" [ 1472.063810] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.076847] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5212c8dc-5e32-f942-2301-7d7148afbea8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.081869] env[62816]: DEBUG nova.compute.manager [req-5b4261fa-8786-42a9-9621-d080204938b3 req-216f8406-3dab-4f67-bf62-2eed2d3b1c54 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Received event network-vif-deleted-36672ebf-8330-4f63-8a9e-840cab593685 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1472.082073] env[62816]: INFO nova.compute.manager [req-5b4261fa-8786-42a9-9621-d080204938b3 req-216f8406-3dab-4f67-bf62-2eed2d3b1c54 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Neutron deleted interface 36672ebf-8330-4f63-8a9e-840cab593685; detaching it from the instance and deleting it from the info cache [ 1472.082248] env[62816]: DEBUG nova.network.neutron [req-5b4261fa-8786-42a9-9621-d080204938b3 req-216f8406-3dab-4f67-bf62-2eed2d3b1c54 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.114594] env[62816]: DEBUG nova.network.neutron [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Successfully created port: 1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1472.501965] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c643762-ec4c-4fd8-8a50-93b0ce552dd9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.504739] env[62816]: DEBUG nova.network.neutron [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.510931] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f98efaf-554c-4a97-891f-a75e5a99bd88 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.547319] env[62816]: DEBUG nova.network.neutron [-] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.553862] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90c46ff-c119-4321-8853-8d9be11965c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.562699] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ecab04-cf9c-4c07-82cb-f374c4a75bbf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.581042] env[62816]: DEBUG nova.compute.provider_tree [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1472.586085] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5212c8dc-5e32-f942-2301-7d7148afbea8, 'name': SearchDatastore_Task, 'duration_secs': 0.009319} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.586580] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.586798] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0a1a8539-940a-4a17-9826-82736be41892/0a1a8539-940a-4a17-9826-82736be41892.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1472.586933] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-df1f7407-56b4-4acd-a09c-429685d7628b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.589496] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8810099a-67b9-43a4-8e46-7d2e05c8bca4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.601772] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725947ff-f876-4de2-8ded-455fe01e81e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.614257] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1472.614257] env[62816]: value = "task-1788164" [ 1472.614257] env[62816]: _type = "Task" [ 1472.614257] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.620909] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788164, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.632219] env[62816]: DEBUG nova.compute.manager [req-5b4261fa-8786-42a9-9621-d080204938b3 req-216f8406-3dab-4f67-bf62-2eed2d3b1c54 service nova] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Detach interface failed, port_id=36672ebf-8330-4f63-8a9e-840cab593685, reason: Instance a6b06048-6cdc-497e-8c5d-b6a26d3e7557 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1472.711458] env[62816]: DEBUG nova.network.neutron [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Updated VIF entry in instance network info cache for port c6e6822e-53bd-4c81-b715-3c3d6bef6f45. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1472.711857] env[62816]: DEBUG nova.network.neutron [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Updating instance_info_cache with network_info: [{"id": "c6e6822e-53bd-4c81-b715-3c3d6bef6f45", "address": "fa:16:3e:1b:6e:2c", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6e6822e-53", "ovs_interfaceid": "c6e6822e-53bd-4c81-b715-3c3d6bef6f45", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.848396] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1472.877023] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1472.877331] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1472.877495] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1472.877680] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1472.877826] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1472.879952] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1472.879952] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1472.879952] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1472.879952] env[62816]: DEBUG nova.virt.hardware [None 
req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1472.879952] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1472.879952] env[62816]: DEBUG nova.virt.hardware [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1472.879952] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50cb648-8f74-4a8c-ad9e-c9dd23e91353 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.888721] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec74d135-4fe9-44c9-ae36-b8aeb9fe066b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.008577] env[62816]: INFO nova.compute.manager [-] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Took 1.04 seconds to deallocate network for instance. [ 1473.052151] env[62816]: INFO nova.compute.manager [-] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Took 1.31 seconds to deallocate network for instance. [ 1473.125037] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788164, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.126336] env[62816]: DEBUG nova.scheduler.client.report [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1473.126596] env[62816]: DEBUG nova.compute.provider_tree [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 59 to 60 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1473.126775] env[62816]: DEBUG nova.compute.provider_tree [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1473.214500] env[62816]: DEBUG oslo_concurrency.lockutils [req-b1a5e30b-2fbc-4a5a-a5ef-a3edda09e456 req-9a755e8e-7b31-4561-b51f-a08207d71d9e service nova] Releasing lock "refresh_cache-0a1a8539-940a-4a17-9826-82736be41892" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.518776] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.561192] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.624813] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788164, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61277} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.625156] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0a1a8539-940a-4a17-9826-82736be41892/0a1a8539-940a-4a17-9826-82736be41892.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1473.625386] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1473.625633] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a552abe-1831-49b4-afd2-b7c767f7e00d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.632897] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.808s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.633403] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1473.636892] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1473.636892] env[62816]: value = "task-1788165" [ 1473.636892] env[62816]: _type = "Task" [ 1473.636892] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.637445] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.589s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.637617] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.640422] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.978s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.640611] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.643266] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.126s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.643462] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.645191] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.921s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.647039] env[62816]: INFO nova.compute.claims [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1473.660169] env[62816]: DEBUG oslo_vmware.api [None 
req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788165, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.677141] env[62816]: INFO nova.scheduler.client.report [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Deleted allocations for instance f06102d6-be5c-40d1-ae1d-8ae8190fd0d7 [ 1473.677463] env[62816]: INFO nova.scheduler.client.report [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Deleted allocations for instance 914b187f-b05f-49d4-bf61-d536ef61934d [ 1473.703750] env[62816]: INFO nova.scheduler.client.report [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleted allocations for instance 4a6ac464-a5e0-4ed6-909d-f1730be14380 [ 1473.847597] env[62816]: DEBUG nova.network.neutron [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Successfully updated port: 1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1473.851851] env[62816]: DEBUG nova.compute.manager [req-2fe9ef37-657e-439d-bd37-964f1de7b2dc req-b0cf226e-ae83-4b45-8fe7-62ba58d79ea0 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Received event network-vif-plugged-1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1473.851851] env[62816]: DEBUG oslo_concurrency.lockutils [req-2fe9ef37-657e-439d-bd37-964f1de7b2dc req-b0cf226e-ae83-4b45-8fe7-62ba58d79ea0 service nova] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1473.851851] env[62816]: DEBUG oslo_concurrency.lockutils [req-2fe9ef37-657e-439d-bd37-964f1de7b2dc req-b0cf226e-ae83-4b45-8fe7-62ba58d79ea0 service nova] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1473.851851] env[62816]: DEBUG oslo_concurrency.lockutils [req-2fe9ef37-657e-439d-bd37-964f1de7b2dc req-b0cf226e-ae83-4b45-8fe7-62ba58d79ea0 service nova] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.851851] env[62816]: DEBUG nova.compute.manager [req-2fe9ef37-657e-439d-bd37-964f1de7b2dc req-b0cf226e-ae83-4b45-8fe7-62ba58d79ea0 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] No waiting events found dispatching network-vif-plugged-1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1473.851851] env[62816]: WARNING 
nova.compute.manager [req-2fe9ef37-657e-439d-bd37-964f1de7b2dc req-b0cf226e-ae83-4b45-8fe7-62ba58d79ea0 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Received unexpected event network-vif-plugged-1110b9ce-766b-4ab4-b75f-4e0139f78297 for instance with vm_state building and task_state spawning. [ 1474.151789] env[62816]: DEBUG nova.compute.utils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.157912] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1474.158454] env[62816]: DEBUG nova.network.neutron [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.168902] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788165, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064138} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.169907] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1474.170757] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770775d0-8e9d-43bd-852e-b5528899761e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.204303] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 0a1a8539-940a-4a17-9826-82736be41892/0a1a8539-940a-4a17-9826-82736be41892.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1474.205455] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b23f257-c6f4-473e-bc1e-11d239890658 tempest-ServerShowV247Test-319141433 tempest-ServerShowV247Test-319141433-project-member] Lock "f06102d6-be5c-40d1-ae1d-8ae8190fd0d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.449s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.206839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-431ee2b2-170a-44e1-b4f5-b69a51823c8d tempest-ServerMetadataNegativeTestJSON-1302890562 
tempest-ServerMetadataNegativeTestJSON-1302890562-project-member] Lock "914b187f-b05f-49d4-bf61-d536ef61934d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.480s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.211382] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-091f2bf1-735c-49f7-b763-b01ef08cb126 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.226403] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53821ec6-2c81-4792-a3d9-5d2b07f9dc18 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.364s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.229707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 38.044s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.229707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.229707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.229707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.232506] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1474.232506] env[62816]: value = "task-1788166" [ 1474.232506] env[62816]: _type = "Task" [ 1474.232506] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.233086] env[62816]: INFO nova.compute.manager [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Terminating instance [ 1474.234638] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.234800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquired lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.234947] env[62816]: DEBUG nova.network.neutron [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.246789] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788166, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.258013] env[62816]: DEBUG nova.policy [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5616501cbe4643cbb6b63da8a1fb5109', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d3199e2925649fab11e2ef7f043c8f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.349816] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.350539] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.350771] env[62816]: DEBUG nova.network.neutron [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1474.661448] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1474.740815] env[62816]: DEBUG nova.compute.utils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Can not refresh info_cache because instance was not found {{(pid=62816) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1474.758821] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788166, 'name': ReconfigVM_Task, 'duration_secs': 0.450741} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.758821] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 0a1a8539-940a-4a17-9826-82736be41892/0a1a8539-940a-4a17-9826-82736be41892.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1474.759378] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3996e6b-71b9-477f-a274-211fe0461e59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.771610] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1474.771610] env[62816]: value = "task-1788167" [ 1474.771610] env[62816]: _type = "Task" [ 1474.771610] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.773587] env[62816]: DEBUG nova.network.neutron [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1474.779385] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788167, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.898854] env[62816]: DEBUG nova.network.neutron [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1474.902626] env[62816]: DEBUG nova.network.neutron [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Successfully created port: 11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1475.012340] env[62816]: DEBUG nova.network.neutron [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.197028] env[62816]: DEBUG nova.network.neutron [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [{"id": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "address": "fa:16:3e:d6:81:d5", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1110b9ce-76", "ovs_interfaceid": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.284171] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788167, 'name': Rename_Task, 'duration_secs': 0.133382} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.284171] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1475.285366] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7f5d22b-9b26-43ca-ae33-d187530813f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.292581] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1475.292581] env[62816]: value = "task-1788168" [ 1475.292581] env[62816]: _type = "Task" [ 1475.292581] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.300936] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788168, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.330782] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb7d1b3-e821-48a4-98d3-8e3bb2c2691e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.339872] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e077c1e3-7c69-46a6-a705-a97b05122601 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.372137] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8262580f-b62f-4b03-b3ca-041bf402aab0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.380296] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dcf043a-43bd-4911-beaf-976db5e547e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.395283] env[62816]: DEBUG nova.compute.provider_tree [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1475.517858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 
tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Releasing lock "refresh_cache-4a6ac464-a5e0-4ed6-909d-f1730be14380" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.518315] env[62816]: DEBUG nova.compute.manager [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1475.518506] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1475.518804] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a3f81f10-778c-45d0-a234-dc0e2fe43fc5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.528675] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a7bca5-3674-4ed2-b6dd-8a46107b980c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.562045] env[62816]: WARNING nova.virt.vmwareapi.vmops [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4a6ac464-a5e0-4ed6-909d-f1730be14380 could not be found. [ 1475.562278] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1475.563383] env[62816]: INFO nova.compute.manager [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1475.563383] env[62816]: DEBUG oslo.service.loopingcall [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1475.563383] env[62816]: DEBUG nova.compute.manager [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1475.563383] env[62816]: DEBUG nova.network.neutron [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1475.593941] env[62816]: DEBUG nova.network.neutron [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.677096] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1475.699338] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.699655] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Instance network_info: |[{"id": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "address": "fa:16:3e:d6:81:d5", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1110b9ce-76", "ovs_interfaceid": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1475.700738] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:81:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': 
True}, 'iface_id': '1110b9ce-766b-4ab4-b75f-4e0139f78297', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1475.712712] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Creating folder: Project (7e4a92683e3f457bb157966a92b48577). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1475.715866] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1475.716186] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1475.716434] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1475.716695] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1475.716902] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1475.717112] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1475.717384] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1475.717680] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1475.717815] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1475.718063] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1475.718315] env[62816]: DEBUG nova.virt.hardware [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1475.718735] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c6fd6500-17fd-4c94-a12b-465ad1f4ff92 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.721735] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6265c858-e3e0-4024-a4b5-ed08483acd35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.731041] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63f8971-6768-44b6-8123-d235cbc1d59b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.737074] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Created folder: Project (7e4a92683e3f457bb157966a92b48577) in parent group-v370905. [ 1475.737074] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Creating folder: Instances. Parent ref: group-v370991. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1475.737716] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d257c667-830c-4613-af1c-69e3905b9e9d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.755584] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Created folder: Instances in parent group-v370991. 
[ 1475.755840] env[62816]: DEBUG oslo.service.loopingcall [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1475.756055] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1475.756262] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bd3cf5b-7257-4e69-8cd7-2fec96b5e239 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.781604] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1475.781604] env[62816]: value = "task-1788171" [ 1475.781604] env[62816]: _type = "Task" [ 1475.781604] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.797036] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788171, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.810348] env[62816]: DEBUG oslo_vmware.api [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788168, 'name': PowerOnVM_Task, 'duration_secs': 0.469958} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.812137] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.812398] env[62816]: INFO nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Took 6.77 seconds to spawn the instance on the hypervisor. 
[ 1475.812645] env[62816]: DEBUG nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1475.813491] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d5e5407-2264-47c4-88fb-9edf47208a3d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.936246] env[62816]: DEBUG nova.scheduler.client.report [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 60 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1475.936246] env[62816]: DEBUG nova.compute.provider_tree [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 60 to 61 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1475.936246] env[62816]: DEBUG nova.compute.provider_tree [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1475.972314] env[62816]: DEBUG nova.compute.manager [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Received event network-changed-1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1475.972314] env[62816]: DEBUG nova.compute.manager [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Refreshing instance network info cache due to event network-changed-1110b9ce-766b-4ab4-b75f-4e0139f78297. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1475.972314] env[62816]: DEBUG oslo_concurrency.lockutils [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] Acquiring lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1475.972314] env[62816]: DEBUG oslo_concurrency.lockutils [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] Acquired lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1475.972552] env[62816]: DEBUG nova.network.neutron [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Refreshing network info cache for port 1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.096558] env[62816]: DEBUG nova.network.neutron [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.280503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "99bd7579-7097-41df-a8c0-e12a3863a3dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.280837] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.281560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "99bd7579-7097-41df-a8c0-e12a3863a3dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1476.281560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.281560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.288399] env[62816]: INFO nova.compute.manager [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Terminating instance [ 1476.289882] env[62816]: DEBUG nova.compute.manager [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1476.289882] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1476.290407] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2ef2a7-011b-414f-900d-f9c7dfa8cfff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.296605] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788171, 'name': CreateVM_Task, 'duration_secs': 0.426269} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.298937] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1476.299676] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.299843] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.300184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1476.303508] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c22fbbac-c680-4955-86b3-8c845ac1792f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.305899] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1476.306145] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9dfe8d14-ca3b-4981-9af1-1e913b1e19e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.312149] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1476.312149] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521d7f91-c378-1cbc-3f73-9009a67bfe08" [ 1476.312149] env[62816]: _type = "Task" [ 1476.312149] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.312947] env[62816]: DEBUG oslo_vmware.api [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1476.312947] env[62816]: value = "task-1788172" [ 1476.312947] env[62816]: _type = "Task" [ 1476.312947] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.329240] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521d7f91-c378-1cbc-3f73-9009a67bfe08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.332623] env[62816]: DEBUG oslo_vmware.api [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.335816] env[62816]: INFO nova.compute.manager [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Took 49.32 seconds to build instance. [ 1476.440553] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.795s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.441099] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1476.443873] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.935s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.445309] env[62816]: INFO nova.compute.claims [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.598593] env[62816]: INFO nova.compute.manager [-] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Took 1.04 seconds to deallocate network for instance. [ 1476.829124] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521d7f91-c378-1cbc-3f73-9009a67bfe08, 'name': SearchDatastore_Task, 'duration_secs': 0.038753} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.832544] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.832899] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1476.833188] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.834490] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.834490] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1476.834490] env[62816]: DEBUG oslo_vmware.api [None 
req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788172, 'name': PowerOffVM_Task, 'duration_secs': 0.279237} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.834490] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7c9f641-5a3b-469d-8a52-237a12fecbd0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.836488] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1476.836713] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1476.837008] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c85d2bdf-9022-4db0-819f-627dd06a9eca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.840994] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41f6201c-a5f4-486f-b049-2cf22450d771 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0a1a8539-940a-4a17-9826-82736be41892" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.926s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.841779] env[62816]: DEBUG nova.network.neutron [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updated VIF entry in instance network info cache for port 1110b9ce-766b-4ab4-b75f-4e0139f78297. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1476.842148] env[62816]: DEBUG nova.network.neutron [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [{"id": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "address": "fa:16:3e:d6:81:d5", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1110b9ce-76", "ovs_interfaceid": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1476.849668] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1476.849857] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1476.850566] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce4a446f-e758-4f65-a00f-8d0bc09c71af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.855979] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1476.855979] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cc0262-d64f-10bf-e65b-a4d9146bbf65" [ 1476.855979] env[62816]: _type = "Task" [ 1476.855979] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.866532] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cc0262-d64f-10bf-e65b-a4d9146bbf65, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.895401] env[62816]: DEBUG nova.network.neutron [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Successfully updated port: 11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1476.918538] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1476.918771] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1476.919753] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleting the datastore file [datastore1] 99bd7579-7097-41df-a8c0-e12a3863a3dc {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1476.919753] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65ac7dff-4462-47e8-b125-a6e3712f1919 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.927064] env[62816]: DEBUG oslo_vmware.api [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for the task: (returnval){ [ 1476.927064] env[62816]: value = "task-1788174" [ 1476.927064] env[62816]: _type = "Task" [ 1476.927064] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.935764] env[62816]: DEBUG oslo_vmware.api [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.954024] env[62816]: DEBUG nova.compute.utils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1476.958435] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1476.958673] env[62816]: DEBUG nova.network.neutron [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1477.008532] env[62816]: DEBUG nova.policy [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b66d12f51d54e09be08d8908ae6d2e2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6456bdfcb5a44d3a8514fe13e8af920', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1477.106296] env[62816]: INFO nova.compute.manager [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance disappeared during terminate [ 1477.106737] env[62816]: DEBUG oslo_concurrency.lockutils [None req-31f3334b-b3b7-48fe-b456-4e3256c5c251 tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "4a6ac464-a5e0-4ed6-909d-f1730be14380" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.879s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.346731] env[62816]: DEBUG oslo_concurrency.lockutils [req-91714112-b4f2-4bc7-ab29-05dc7c9beaa8 req-a8b0a14b-2305-4143-b0fb-b06d56fbf330 service nova] Releasing lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.347271] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1477.373133] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cc0262-d64f-10bf-e65b-a4d9146bbf65, 'name': SearchDatastore_Task, 'duration_secs': 0.015423} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.374188] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb850a89-6b52-4b24-9c9a-3980d86d6014 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.380594] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1477.380594] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5266deaf-6fbf-a1cf-3e34-5eea60074f53" [ 1477.380594] env[62816]: _type = "Task" [ 1477.380594] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.390089] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5266deaf-6fbf-a1cf-3e34-5eea60074f53, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.399373] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "refresh_cache-e1067d45-1938-4021-b902-21a1aa57058a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.399522] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired lock "refresh_cache-e1067d45-1938-4021-b902-21a1aa57058a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.399668] env[62816]: DEBUG nova.network.neutron [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.436792] env[62816]: DEBUG oslo_vmware.api [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Task: {'id': task-1788174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.21267} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.437162] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1477.438030] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1477.438030] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1477.438030] env[62816]: INFO nova.compute.manager [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1477.439458] env[62816]: DEBUG oslo.service.loopingcall [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1477.439712] env[62816]: DEBUG nova.compute.manager [-] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1477.439944] env[62816]: DEBUG nova.network.neutron [-] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1477.462412] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1477.672903] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.673162] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.691538] env[62816]: DEBUG nova.network.neutron [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Successfully created port: 89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1477.875282] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.891689] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5266deaf-6fbf-a1cf-3e34-5eea60074f53, 'name': SearchDatastore_Task, 'duration_secs': 0.016756} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.896176] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.896539] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1c3392d3-cfb0-47c6-9366-8c363ad21297/1c3392d3-cfb0-47c6-9366-8c363ad21297.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1477.897024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-077db139-c2a7-4ae3-9398-75879d788a5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.906224] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1477.906224] env[62816]: value = "task-1788175" [ 1477.906224] env[62816]: _type = "Task" [ 1477.906224] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.919559] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788175, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.950584] env[62816]: DEBUG nova.network.neutron [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1478.008045] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e51843-67ff-47c7-9868-bbe17e50e783 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.018728] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13838396-2751-4189-b452-22fcc4e5d1a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.032110] env[62816]: DEBUG nova.compute.manager [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Received event network-vif-plugged-11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.032110] env[62816]: DEBUG oslo_concurrency.lockutils [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] Acquiring lock "e1067d45-1938-4021-b902-21a1aa57058a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.032110] env[62816]: DEBUG oslo_concurrency.lockutils [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] Lock "e1067d45-1938-4021-b902-21a1aa57058a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.032110] env[62816]: DEBUG oslo_concurrency.lockutils [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] Lock "e1067d45-1938-4021-b902-21a1aa57058a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.032110] env[62816]: DEBUG nova.compute.manager [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] No waiting events found dispatching network-vif-plugged-11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.032110] env[62816]: WARNING nova.compute.manager [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Received unexpected event network-vif-plugged-11c87595-6807-405a-ac5b-7099ec0d0bab for instance with vm_state building and task_state spawning. 
[ 1478.032110] env[62816]: DEBUG nova.compute.manager [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Received event network-changed-11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.032110] env[62816]: DEBUG nova.compute.manager [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Refreshing instance network info cache due to event network-changed-11c87595-6807-405a-ac5b-7099ec0d0bab. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1478.032110] env[62816]: DEBUG oslo_concurrency.lockutils [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] Acquiring lock "refresh_cache-e1067d45-1938-4021-b902-21a1aa57058a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.061508] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef7d346-265b-41bd-8367-135a3c903ab3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.070679] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f297650-85f9-4447-ad67-02d859082311 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.089182] env[62816]: DEBUG nova.compute.provider_tree [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1478.181680] env[62816]: DEBUG nova.network.neutron [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Updating instance_info_cache with network_info: [{"id": "11c87595-6807-405a-ac5b-7099ec0d0bab", "address": "fa:16:3e:3b:f3:22", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap11c87595-68", "ovs_interfaceid": "11c87595-6807-405a-ac5b-7099ec0d0bab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.418177] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788175, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.475275] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1478.502941] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1478.503243] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1478.503401] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1478.503584] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1478.503740] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1478.503921] env[62816]: DEBUG nova.virt.hardware [None 
req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1478.504168] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1478.504332] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1478.504501] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1478.504666] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1478.504842] env[62816]: DEBUG nova.virt.hardware [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1478.505751] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70812385-763c-4cd9-8453-96067552dd2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.516694] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ffabba-4958-483f-80ff-3aa5174b07ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.530107] env[62816]: DEBUG nova.network.neutron [-] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.614072] env[62816]: ERROR nova.scheduler.client.report [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [req-68c518b0-7aea-4779-8144-7fb4466f2525] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 
'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-68c518b0-7aea-4779-8144-7fb4466f2525"}]} [ 1478.632861] env[62816]: DEBUG nova.scheduler.client.report [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1478.646922] env[62816]: DEBUG nova.scheduler.client.report [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1478.647754] env[62816]: DEBUG nova.compute.provider_tree [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1478.661996] env[62816]: DEBUG nova.scheduler.client.report [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1478.684572] env[62816]: DEBUG nova.scheduler.client.report [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1478.686927] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Releasing lock "refresh_cache-e1067d45-1938-4021-b902-21a1aa57058a" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.688206] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Instance network_info: |[{"id": "11c87595-6807-405a-ac5b-7099ec0d0bab", "address": "fa:16:3e:3b:f3:22", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11c87595-68", "ovs_interfaceid": "11c87595-6807-405a-ac5b-7099ec0d0bab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1478.688994] env[62816]: DEBUG oslo_concurrency.lockutils [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] Acquired lock "refresh_cache-e1067d45-1938-4021-b902-21a1aa57058a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.689204] env[62816]: DEBUG nova.network.neutron [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Refreshing network info cache for port 11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.690467] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:f3:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '11c87595-6807-405a-ac5b-7099ec0d0bab', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.699754] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating folder: Project (4d3199e2925649fab11e2ef7f043c8f5). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1478.699754] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3f6709d-77d4-4fc1-87a6-0421b9017c54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.709088] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Created folder: Project (4d3199e2925649fab11e2ef7f043c8f5) in parent group-v370905. [ 1478.709211] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating folder: Instances. Parent ref: group-v370994. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1478.710049] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-88678f30-8bdd-4ef9-87eb-dfee9c22a05e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.720444] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Created folder: Instances in parent group-v370994. [ 1478.720775] env[62816]: DEBUG oslo.service.loopingcall [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.725078] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.725531] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f8bbcba9-9137-46fc-8ae6-890a470140df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.748234] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.748234] env[62816]: value = "task-1788178" [ 1478.748234] env[62816]: _type = "Task" [ 1478.748234] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.757856] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788178, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.922995] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788175, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.034546] env[62816]: INFO nova.compute.manager [-] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Took 1.59 seconds to deallocate network for instance. 
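The repeated "Waiting for the task ... to complete" and "progress is N%." entries above (CreateVM_Task, CopyVirtualDisk_Task, and the later SearchDatastore_Task calls) come from a poll-until-complete loop around vCenter tasks. A minimal, simplified sketch of that pattern follows; it is not the actual oslo.vmware implementation, and get_task_info is a hypothetical callable standing in for the real property-collector read.

    # Simplified sketch of the task-polling pattern behind the
    # "Waiting for the task" / "progress is N%" log entries above.
    # `get_task_info` is a hypothetical helper, assumed to return an
    # object with .state ('queued'/'running'/'success'/'error'),
    # .progress, .result and .error attributes.
    import time

    def wait_for_task(get_task_info, task_ref, poll_interval=0.5):
        while True:
            info = get_task_info(task_ref)
            if info.state in ('queued', 'running'):
                print(f"Task {task_ref} progress is {info.progress}%.")
            elif info.state == 'success':
                print(f"Task {task_ref} completed successfully.")
                return info.result
            else:  # 'error'
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            time.sleep(poll_interval)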
[ 1479.166924] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56448949-63f7-4732-9f13-253cc190db7d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.175550] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24dd420-fda2-41ee-9916-fa6855a8beef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.214619] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fb0990-59d3-4ed0-928c-38ac34061f64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.222822] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d5dd33-0a87-4512-a4c6-1c7648b51767 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.237280] env[62816]: DEBUG nova.compute.provider_tree [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1479.261568] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788178, 'name': CreateVM_Task, 'duration_secs': 0.367271} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.261568] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.261568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.261568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.261568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.261568] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5886c06-f503-46b8-98a5-3cc780a5140b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.265343] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1479.265343] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52087dee-c7ec-f3a7-34a0-722aebcafd4f" [ 1479.265343] env[62816]: _type = "Task" [ 1479.265343] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.273172] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52087dee-c7ec-f3a7-34a0-722aebcafd4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.420711] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788175, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.483883] env[62816]: DEBUG nova.network.neutron [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Updated VIF entry in instance network info cache for port 11c87595-6807-405a-ac5b-7099ec0d0bab. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.484276] env[62816]: DEBUG nova.network.neutron [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Updating instance_info_cache with network_info: [{"id": "11c87595-6807-405a-ac5b-7099ec0d0bab", "address": "fa:16:3e:3b:f3:22", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap11c87595-68", "ovs_interfaceid": "11c87595-6807-405a-ac5b-7099ec0d0bab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.541832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.777221] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52087dee-c7ec-f3a7-34a0-722aebcafd4f, 'name': SearchDatastore_Task, 'duration_secs': 0.07447} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.777406] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1479.777483] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1479.778084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.778257] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.778448] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1479.779481] env[62816]: DEBUG nova.scheduler.client.report [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 62 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1479.779709] env[62816]: DEBUG nova.compute.provider_tree [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 62 to 63 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1479.779892] env[62816]: DEBUG nova.compute.provider_tree [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 
tempest-ListImageFiltersTestJSON-413507774-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1479.783435] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bba7fc16-8ea2-4e57-b19c-086c07d13631 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.793517] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1479.793863] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1479.794464] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6979687-cce6-4c96-934a-cb5ee9059491 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.800667] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1479.800667] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523ddf91-62fe-8af1-da7f-b7971f6dce58" [ 1479.800667] env[62816]: _type = "Task" [ 1479.800667] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.810494] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523ddf91-62fe-8af1-da7f-b7971f6dce58, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.915299] env[62816]: DEBUG nova.network.neutron [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Successfully updated port: 89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1479.923370] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788175, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.682809} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.927020] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1c3392d3-cfb0-47c6-9366-8c363ad21297/1c3392d3-cfb0-47c6-9366-8c363ad21297.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1479.927020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1479.927020] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d809dc41-a1d4-4ba3-88cd-c5e16a039789 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.938284] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1479.938284] env[62816]: value = "task-1788179" [ 1479.938284] env[62816]: _type = "Task" [ 1479.938284] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.949321] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.991018] env[62816]: DEBUG oslo_concurrency.lockutils [req-73cb3303-a91f-45ad-b2c9-4a2f2eb5615f req-46843738-deff-4733-838d-dbd76714d945 service nova] Releasing lock "refresh_cache-e1067d45-1938-4021-b902-21a1aa57058a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.071897] env[62816]: DEBUG nova.compute.manager [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Received event network-vif-deleted-1443be92-279c-4376-8c5d-2dff1bb3f82f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.071897] env[62816]: DEBUG nova.compute.manager [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Received event network-vif-plugged-89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.071897] env[62816]: DEBUG oslo_concurrency.lockutils [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] Acquiring lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.071897] env[62816]: DEBUG oslo_concurrency.lockutils [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.071897] env[62816]: DEBUG oslo_concurrency.lockutils [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.071897] env[62816]: DEBUG nova.compute.manager [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] No waiting events found dispatching network-vif-plugged-89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1480.072626] env[62816]: WARNING nova.compute.manager [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Received unexpected event network-vif-plugged-89c39d07-acd3-4f92-a168-921d07739ac6 for instance with vm_state building and task_state spawning. 
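The Acquiring/Acquired/"released" lock entries around the "<uuid>-events" name above reflect the usual oslo.concurrency lock pattern used while popping a waiting instance event. A minimal sketch of that usage, assuming oslo.concurrency is installed; the function body and the waiting_events mapping are illustrative stand-ins, not Nova's actual event bookkeeping.

    # Minimal sketch of the in-process lock usage suggested by the
    # '"<uuid>-events" acquired by ... _pop_event' entries above.
    from oslo_concurrency import lockutils

    def pop_instance_event(instance_uuid, event_name, waiting_events):
        # Hold the per-instance events lock while checking for a waiter;
        # returns None when "No waiting events found" applies.
        with lockutils.lock(f"{instance_uuid}-events"):
            return waiting_events.pop(event_name, None)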
[ 1480.072626] env[62816]: DEBUG nova.compute.manager [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Received event network-changed-89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.072626] env[62816]: DEBUG nova.compute.manager [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Refreshing instance network info cache due to event network-changed-89c39d07-acd3-4f92-a168-921d07739ac6. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1480.072626] env[62816]: DEBUG oslo_concurrency.lockutils [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] Acquiring lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.072845] env[62816]: DEBUG oslo_concurrency.lockutils [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] Acquired lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.072845] env[62816]: DEBUG nova.network.neutron [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Refreshing network info cache for port 89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1480.289995] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.846s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.290580] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1480.293885] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.702s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.295351] env[62816]: INFO nova.compute.claims [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.310728] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523ddf91-62fe-8af1-da7f-b7971f6dce58, 'name': SearchDatastore_Task, 'duration_secs': 0.008686} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.311676] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04443e8f-368d-4b98-9cf3-82a6455747f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.318308] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1480.318308] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52452586-154f-7fb9-f030-f77184733170" [ 1480.318308] env[62816]: _type = "Task" [ 1480.318308] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.329614] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52452586-154f-7fb9-f030-f77184733170, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.425332] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.447914] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066374} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.448595] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1480.449395] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4675401a-0b54-437f-8902-f2a66715c7db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.472141] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 1c3392d3-cfb0-47c6-9366-8c363ad21297/1c3392d3-cfb0-47c6-9366-8c363ad21297.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1480.472454] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df5c4a7a-f90f-4b91-826b-778ecdff3514 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.492044] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1480.492044] env[62816]: value = "task-1788180" [ 1480.492044] env[62816]: _type = "Task" [ 1480.492044] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.504921] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788180, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.610069] env[62816]: DEBUG nova.network.neutron [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1480.706059] env[62816]: DEBUG nova.network.neutron [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.799019] env[62816]: DEBUG nova.compute.utils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1480.799019] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1480.799019] env[62816]: DEBUG nova.network.neutron [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1480.830418] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52452586-154f-7fb9-f030-f77184733170, 'name': SearchDatastore_Task, 'duration_secs': 0.044818} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.830418] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.830568] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e1067d45-1938-4021-b902-21a1aa57058a/e1067d45-1938-4021-b902-21a1aa57058a.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1480.830735] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c986fd54-d93a-46e4-af6b-db61a1aa4983 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.838739] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1480.838739] env[62816]: value = "task-1788181" [ 1480.838739] env[62816]: _type = "Task" [ 1480.838739] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.847464] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788181, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.874748] env[62816]: DEBUG nova.policy [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5616501cbe4643cbb6b63da8a1fb5109', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d3199e2925649fab11e2ef7f043c8f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1481.005138] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788180, 'name': ReconfigVM_Task, 'duration_secs': 0.385988} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.005426] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 1c3392d3-cfb0-47c6-9366-8c363ad21297/1c3392d3-cfb0-47c6-9366-8c363ad21297.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1481.006101] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8e5f821b-4bcc-477c-bd1f-f53ed7ee6e98 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.012177] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1481.012177] env[62816]: value = "task-1788182" [ 1481.012177] env[62816]: _type = "Task" [ 1481.012177] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.021263] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788182, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.212733] env[62816]: DEBUG oslo_concurrency.lockutils [req-c88e579d-454f-4e2f-9ca5-15155518d76f req-7762bdea-25ff-4a8e-b2a8-3aa429499af7 service nova] Releasing lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.213491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquired lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.213893] env[62816]: DEBUG nova.network.neutron [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1481.261288] env[62816]: DEBUG nova.network.neutron [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Successfully created port: 9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1481.303098] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1481.354896] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788181, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.529579] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788182, 'name': Rename_Task, 'duration_secs': 0.438188} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.529579] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1481.529579] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c4f7f26-1e85-4e83-97a7-575298f5efdb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.536605] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1481.536605] env[62816]: value = "task-1788183" [ 1481.536605] env[62816]: _type = "Task" [ 1481.536605] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.548325] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788183, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.747559] env[62816]: DEBUG nova.network.neutron [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1481.852052] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788181, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571162} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.854288] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e1067d45-1938-4021-b902-21a1aa57058a/e1067d45-1938-4021-b902-21a1aa57058a.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1481.854511] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1481.855550] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64411971-69d1-4462-928e-dceaca30a68e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.863759] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1481.863759] env[62816]: value = "task-1788184" [ 1481.863759] env[62816]: _type = "Task" [ 1481.863759] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.871879] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788184, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.873527] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f307947d-2192-41a8-91d9-f735e5ccc2ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.880529] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83389a59-fe8b-4c6b-abd8-9731a2ef3c76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.913065] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6bde31-17c2-414e-b757-cec284c9f4d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.921038] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c803c2-4a80-40d2-a9c7-a583ffcc7be6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.936563] env[62816]: DEBUG nova.compute.provider_tree [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1481.969865] env[62816]: DEBUG nova.network.neutron [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Updating instance_info_cache with network_info: [{"id": "89c39d07-acd3-4f92-a168-921d07739ac6", "address": "fa:16:3e:70:eb:28", "network": {"id": "50096a05-9b12-4e0c-99c7-a975626057ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-210536879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6456bdfcb5a44d3a8514fe13e8af920", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c39d07-ac", "ovs_interfaceid": "89c39d07-acd3-4f92-a168-921d07739ac6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.047308] env[62816]: 
DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788183, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.318850] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1482.345770] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1482.346166] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1482.346433] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1482.346757] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1482.347016] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1482.347325] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1482.347612] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 
tempest-ListImageFiltersTestJSON-413507774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1482.347875] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1482.348169] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1482.348441] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1482.348723] env[62816]: DEBUG nova.virt.hardware [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1482.349973] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2821996-18b5-4598-8b37-518277e91803 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.358557] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5086a389-d5e8-42df-9437-9b46cb28529e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.380397] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080961} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.380672] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1482.381478] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517ea9cf-8775-434d-ae2f-048dc6becfa8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.406927] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] e1067d45-1938-4021-b902-21a1aa57058a/e1067d45-1938-4021-b902-21a1aa57058a.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1482.407123] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2035b24-5fad-413b-8f97-804c7ea909e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.428765] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1482.428765] env[62816]: value = "task-1788185" [ 1482.428765] env[62816]: _type = "Task" [ 1482.428765] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.437671] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788185, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.457664] env[62816]: ERROR nova.scheduler.client.report [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [req-dee2afd2-a4cc-4248-b81f-63f57d4b9065] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dee2afd2-a4cc-4248-b81f-63f57d4b9065"}]} [ 1482.472586] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Releasing lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1482.472995] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Instance network_info: |[{"id": "89c39d07-acd3-4f92-a168-921d07739ac6", "address": "fa:16:3e:70:eb:28", "network": {"id": "50096a05-9b12-4e0c-99c7-a975626057ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-210536879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6456bdfcb5a44d3a8514fe13e8af920", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c39d07-ac", "ovs_interfaceid": "89c39d07-acd3-4f92-a168-921d07739ac6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1482.474378] env[62816]: DEBUG nova.scheduler.client.report [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1482.476369] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:eb:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '25f42474-5594-4733-a681-6c69f4afb946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89c39d07-acd3-4f92-a168-921d07739ac6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1482.483837] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Creating folder: Project (c6456bdfcb5a44d3a8514fe13e8af920). 
Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1482.484580] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d8c14e3-c25c-4473-8994-8035e6901eef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.494646] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Created folder: Project (c6456bdfcb5a44d3a8514fe13e8af920) in parent group-v370905. [ 1482.494878] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Creating folder: Instances. Parent ref: group-v370997. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1482.495454] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f4b702de-8652-4bfc-8846-25ccff6b6f76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.497787] env[62816]: DEBUG nova.scheduler.client.report [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1482.498116] env[62816]: DEBUG nova.compute.provider_tree [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1482.507137] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "f9d9593a-1c25-47a1-98fd-4462a851f134" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.507329] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.510204] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Created folder: Instances in parent group-v370997. [ 1482.510438] env[62816]: DEBUG oslo.service.loopingcall [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1482.511448] env[62816]: DEBUG nova.scheduler.client.report [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1482.513517] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1482.513745] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff56fcff-23f1-4e4d-ab50-a363f9ef2c2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.535445] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1482.535445] env[62816]: value = "task-1788188" [ 1482.535445] env[62816]: _type = "Task" [ 1482.535445] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.536347] env[62816]: DEBUG nova.scheduler.client.report [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1482.549691] env[62816]: DEBUG oslo_vmware.api [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788183, 'name': PowerOnVM_Task, 'duration_secs': 0.908111} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.553072] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1482.553316] env[62816]: INFO nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Took 9.70 seconds to spawn the instance on the hypervisor. [ 1482.553563] env[62816]: DEBUG nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1482.553723] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788188, 'name': CreateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.554448] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0efe19e-ca26-4058-9947-a681495617cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.796471] env[62816]: DEBUG nova.compute.manager [req-5222d2ec-70bc-4dca-af62-540d14c85dd5 req-5ba7eb79-fd7d-4a68-8880-bc28fbddf91c service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Received event network-vif-plugged-9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1482.796795] env[62816]: DEBUG oslo_concurrency.lockutils [req-5222d2ec-70bc-4dca-af62-540d14c85dd5 req-5ba7eb79-fd7d-4a68-8880-bc28fbddf91c service nova] Acquiring lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1482.797183] env[62816]: DEBUG oslo_concurrency.lockutils [req-5222d2ec-70bc-4dca-af62-540d14c85dd5 req-5ba7eb79-fd7d-4a68-8880-bc28fbddf91c service nova] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1482.797433] env[62816]: DEBUG oslo_concurrency.lockutils [req-5222d2ec-70bc-4dca-af62-540d14c85dd5 req-5ba7eb79-fd7d-4a68-8880-bc28fbddf91c service nova] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.797667] env[62816]: DEBUG nova.compute.manager [req-5222d2ec-70bc-4dca-af62-540d14c85dd5 req-5ba7eb79-fd7d-4a68-8880-bc28fbddf91c service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] No waiting events found dispatching network-vif-plugged-9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1482.797923] env[62816]: WARNING nova.compute.manager [req-5222d2ec-70bc-4dca-af62-540d14c85dd5 req-5ba7eb79-fd7d-4a68-8880-bc28fbddf91c service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Received unexpected event network-vif-plugged-9f0023d4-6a62-4c6b-862d-83c21341da28 for instance with vm_state building and task_state spawning. [ 1482.907854] env[62816]: DEBUG nova.network.neutron [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Successfully updated port: 9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1482.939207] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788185, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.033314] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02b52cd-8f44-4a75-8b22-94b3e01b3fcf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.045284] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e226c4-5944-4cf4-b24a-5d07b4f689bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.053172] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788188, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.082652] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e68bb0-f691-4281-a97f-17539aac97ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.088356] env[62816]: INFO nova.compute.manager [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Took 54.98 seconds to build instance. 
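
The wait_for_task / _poll_task entries throughout this section all follow one pattern: a vCenter task (ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task, CreateVM_Task, SearchDatastore_Task, ...) is submitted, then polled until it reports "completed successfully" with a duration_secs, or fails. The following is only a minimal sketch of such a polling loop, not the oslo.vmware implementation; the poll_task_info helper, the 0.5 s interval and the 300 s timeout are illustrative assumptions.

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(poll_task_info, task_id, interval=0.5, timeout=300):
        # poll_task_info(task_id) is an assumed helper returning an object with
        # .state in {'running', 'success', 'error'}, .progress (0-100) and .error;
        # it stands in for the PropertyCollector.RetrievePropertiesEx calls seen above.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_task_info(task_id)
            if info.state == 'success':
                return info  # logged above as "completed successfully" with a duration_secs
            if info.state == 'error':
                raise TaskFailed(info.error)
            print(f"Task {task_id} progress is {info.progress}%")  # cf. the "progress is N%" DEBUG lines
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
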
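Similarly, the earlier ERROR with HTTP 409 and code placement.concurrent_update, followed by the inventory/aggregate/trait refresh and the later "generation from 64 to 65" update, shows placement's optimistic concurrency control: a write carrying a stale resource-provider generation is rejected, the client re-reads the provider and retries the update. A small sketch of that retry loop, assuming hypothetical get_provider()/put_inventory() helpers rather than Nova's actual report-client methods:

    import random
    import time

    class Conflict(Exception):
        # stands in for an HTTP 409 with code 'placement.concurrent_update'
        pass

    def set_inventory_with_retry(get_provider, put_inventory, provider_uuid,
                                 inventory, max_attempts=4):
        # get_provider(uuid) is an assumed helper returning {'generation': int, ...};
        # put_inventory(uuid, generation, inventory) is assumed to raise Conflict on
        # a stale generation and to return the new generation on success. Neither is
        # Nova's real report-client API; they only mirror the refresh-then-update
        # sequence visible in the surrounding log entries.
        for attempt in range(1, max_attempts + 1):
            generation = get_provider(provider_uuid)['generation']  # re-read, cf. _refresh_and_get_inventory
            try:
                return put_inventory(provider_uuid, generation, inventory)
            except Conflict:
                if attempt == max_attempts:
                    raise
                # another writer bumped the generation first; back off briefly and re-read
                time.sleep(random.uniform(0, 0.2) * attempt)
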
[ 1483.092783] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f336743-caee-46fd-9678-3b05d941f1c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.106371] env[62816]: DEBUG nova.compute.provider_tree [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1483.410601] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "refresh_cache-4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.410905] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired lock "refresh_cache-4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.410963] env[62816]: DEBUG nova.network.neutron [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1483.440925] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788185, 'name': ReconfigVM_Task, 'duration_secs': 0.600832} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.442031] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Reconfigured VM instance instance-0000001e to attach disk [datastore1] e1067d45-1938-4021-b902-21a1aa57058a/e1067d45-1938-4021-b902-21a1aa57058a.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1483.442031] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c5b4adfc-8e17-4c78-bd0c-7d38dc85e147 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.448247] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1483.448247] env[62816]: value = "task-1788189" [ 1483.448247] env[62816]: _type = "Task" [ 1483.448247] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.455913] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788189, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.551857] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788188, 'name': CreateVM_Task, 'duration_secs': 0.51938} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.552072] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1483.552838] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.553046] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.553385] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1483.554038] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e7ccc91-e71c-4ed8-baf7-b32aab35fec9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.558781] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1483.558781] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52878973-1364-f7e7-d579-df2a80ad0d1b" [ 1483.558781] env[62816]: _type = "Task" [ 1483.558781] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.566409] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52878973-1364-f7e7-d579-df2a80ad0d1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.590279] env[62816]: DEBUG oslo_concurrency.lockutils [None req-002b33d6-de7d-48a4-8427-9168cd6602ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.008s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.652538] env[62816]: DEBUG nova.scheduler.client.report [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 64 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1483.653671] env[62816]: DEBUG nova.compute.provider_tree [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 64 to 65 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1483.653671] env[62816]: DEBUG nova.compute.provider_tree [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1483.958191] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788189, 'name': Rename_Task, 'duration_secs': 0.148167} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.958817] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1483.958817] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e4e01ca8-5e3a-41a5-a771-919cdcd49e06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.962551] env[62816]: DEBUG nova.network.neutron [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1483.965506] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1483.965506] env[62816]: value = "task-1788190" [ 1483.965506] env[62816]: _type = "Task" [ 1483.965506] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.973266] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.069162] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52878973-1364-f7e7-d579-df2a80ad0d1b, 'name': SearchDatastore_Task, 'duration_secs': 0.01299} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.071808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.071808] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1484.071808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.071808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.071808] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.071808] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-af4dbced-5097-402c-a638-be0c1335009a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.079046] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.080662] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1484.080662] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7c94c01-7c44-47fc-a164-91c802cf5231 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.084929] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1484.084929] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f9510d-0994-b4c2-4b7f-198ff88300d3" [ 1484.084929] env[62816]: _type = "Task" [ 1484.084929] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.093464] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1484.096477] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f9510d-0994-b4c2-4b7f-198ff88300d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.158605] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.865s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1484.159263] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1484.161744] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.520s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1484.162391] env[62816]: DEBUG nova.objects.instance [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lazy-loading 'resources' on Instance uuid 0c5c5c06-0b5e-4e11-84b5-ca76828a0565 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1484.182655] env[62816]: DEBUG nova.network.neutron [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Updating instance_info_cache with network_info: [{"id": "9f0023d4-6a62-4c6b-862d-83c21341da28", "address": "fa:16:3e:8d:a6:f2", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0023d4-6a", "ovs_interfaceid": "9f0023d4-6a62-4c6b-862d-83c21341da28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.474583] env[62816]: DEBUG oslo_vmware.api [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788190, 'name': PowerOnVM_Task, 'duration_secs': 0.465097} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.474583] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1484.474945] env[62816]: INFO nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Took 8.80 seconds to spawn the instance on the hypervisor. 
[ 1484.474945] env[62816]: DEBUG nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1484.475622] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a20c164-d4b6-4860-9a52-b3d80ffb1e99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.596031] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f9510d-0994-b4c2-4b7f-198ff88300d3, 'name': SearchDatastore_Task, 'duration_secs': 0.008928} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.598589] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-295206b3-3dc9-4049-a263-4773b81d9778 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.605916] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1484.605916] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524815a2-5859-840e-3d01-8445175c1215" [ 1484.605916] env[62816]: _type = "Task" [ 1484.605916] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.614716] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524815a2-5859-840e-3d01-8445175c1215, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.617372] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1484.669651] env[62816]: DEBUG nova.compute.utils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1484.671597] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1484.671597] env[62816]: DEBUG nova.network.neutron [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1484.685121] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Releasing lock "refresh_cache-4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.685483] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Instance network_info: |[{"id": "9f0023d4-6a62-4c6b-862d-83c21341da28", "address": "fa:16:3e:8d:a6:f2", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0023d4-6a", "ovs_interfaceid": "9f0023d4-6a62-4c6b-862d-83c21341da28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1484.686148] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:a6:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f0023d4-6a62-4c6b-862d-83c21341da28', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1484.693501] env[62816]: DEBUG oslo.service.loopingcall [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1484.698047] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1484.698442] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c6945c4-d480-4d56-bc21-b6cb721bef8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.719693] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1484.719693] env[62816]: value = "task-1788191" [ 1484.719693] env[62816]: _type = "Task" [ 1484.719693] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.729343] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788191, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.750633] env[62816]: DEBUG nova.policy [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '37cae39ea51942e29c9489a0c7d252e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b34c8d2bcbf49d4a4ac51a454689423', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1484.829521] env[62816]: DEBUG nova.compute.manager [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Received event network-changed-9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1484.829772] env[62816]: DEBUG nova.compute.manager [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Refreshing instance network info cache due to event network-changed-9f0023d4-6a62-4c6b-862d-83c21341da28. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1484.829996] env[62816]: DEBUG oslo_concurrency.lockutils [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] Acquiring lock "refresh_cache-4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.830304] env[62816]: DEBUG oslo_concurrency.lockutils [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] Acquired lock "refresh_cache-4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.830478] env[62816]: DEBUG nova.network.neutron [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Refreshing network info cache for port 9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1485.007023] env[62816]: INFO nova.compute.manager [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Took 53.23 seconds to build instance. [ 1485.121687] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524815a2-5859-840e-3d01-8445175c1215, 'name': SearchDatastore_Task, 'duration_secs': 0.010236} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.126029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.126029] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1/1e3f720c-5a6f-4e7c-aafc-b4680d9667e1.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1485.126029] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6defd554-3abc-46e2-8c83-67b7a64abbb0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.134579] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1485.134579] env[62816]: value = "task-1788192" [ 1485.134579] env[62816]: _type = "Task" [ 1485.134579] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.146618] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788192, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.174040] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1485.225768] env[62816]: DEBUG nova.network.neutron [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Successfully created port: 3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1485.239683] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788191, 'name': CreateVM_Task, 'duration_secs': 0.344062} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.240836] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1485.240836] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.240965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.241237] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1485.241492] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7a754cc-3874-40c9-bbef-90f5cff55355 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.248110] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1485.248110] env[62816]: value = 
"session[52166549-a417-fee9-199e-38636bfc0ddd]5257b5b6-a74e-71f9-ce02-3f570850eb39" [ 1485.248110] env[62816]: _type = "Task" [ 1485.248110] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.262714] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782d51b3-5edd-46fb-8eb8-11e13a9c5681 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.275286] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5257b5b6-a74e-71f9-ce02-3f570850eb39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.276360] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53edd35-bb9b-4061-8a03-89701e643393 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.316080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7cd6df9-6941-4b61-9824-56def16ee3c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.323896] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31574b64-4ad2-4dc8-99d9-a1beb1e1d2c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.339477] env[62816]: DEBUG nova.compute.provider_tree [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.506717] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c3852589-5f5e-42a2-95d2-582a20a26a41 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "e1067d45-1938-4021-b902-21a1aa57058a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.120s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1485.611404] env[62816]: DEBUG nova.network.neutron [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Updated VIF entry in instance network info cache for port 9f0023d4-6a62-4c6b-862d-83c21341da28. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1485.611404] env[62816]: DEBUG nova.network.neutron [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Updating instance_info_cache with network_info: [{"id": "9f0023d4-6a62-4c6b-862d-83c21341da28", "address": "fa:16:3e:8d:a6:f2", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f0023d4-6a", "ovs_interfaceid": "9f0023d4-6a62-4c6b-862d-83c21341da28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.646641] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788192, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.759408] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5257b5b6-a74e-71f9-ce02-3f570850eb39, 'name': SearchDatastore_Task, 'duration_secs': 0.028845} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.759791] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.760109] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1485.760394] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.760552] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1485.761210] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1485.761210] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2f23079b-9e0a-4c5e-9bd3-c0e5e16a7c30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.777614] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1485.777860] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1485.778746] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbbc8201-b310-4f95-881e-e255e8d8c36a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.785729] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1485.785729] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521cd11d-395f-67a0-a395-217e58f48988" [ 1485.785729] env[62816]: _type = "Task" [ 1485.785729] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.795479] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521cd11d-395f-67a0-a395-217e58f48988, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.844184] env[62816]: DEBUG nova.scheduler.client.report [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1486.010169] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1486.114478] env[62816]: DEBUG oslo_concurrency.lockutils [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] Releasing lock "refresh_cache-4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.115506] env[62816]: DEBUG nova.compute.manager [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Received event network-changed-1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1486.115506] env[62816]: DEBUG nova.compute.manager [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Refreshing instance network info cache due to event network-changed-1110b9ce-766b-4ab4-b75f-4e0139f78297. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1486.115506] env[62816]: DEBUG oslo_concurrency.lockutils [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] Acquiring lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.115753] env[62816]: DEBUG oslo_concurrency.lockutils [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] Acquired lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.115753] env[62816]: DEBUG nova.network.neutron [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Refreshing network info cache for port 1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.145957] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788192, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.90364} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.146326] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1/1e3f720c-5a6f-4e7c-aafc-b4680d9667e1.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1486.146703] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1486.146810] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-656b15ea-03d9-40c8-8086-e2c94ab14f59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.155459] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1486.155459] env[62816]: value = "task-1788193" [ 1486.155459] env[62816]: _type = "Task" [ 1486.155459] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.165925] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788193, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.185272] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1486.210408] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1486.210652] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1486.210827] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1486.211037] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1486.211200] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1486.211349] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1486.211558] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1486.211719] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1486.211887] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1486.212067] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1486.212244] env[62816]: DEBUG nova.virt.hardware [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1486.213248] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25bc4d15-e748-4591-9f35-affd1424e82f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.221119] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b962f6e-7cb8-498f-8ce7-44b79d921dc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.295920] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521cd11d-395f-67a0-a395-217e58f48988, 'name': SearchDatastore_Task, 'duration_secs': 0.063111} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.296719] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c7407f0-d692-4146-8091-f3e9e8d7fae7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.302290] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1486.302290] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5224913f-3b07-9a3e-1cea-96d53b97b079" [ 1486.302290] env[62816]: _type = "Task" [ 1486.302290] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.309787] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5224913f-3b07-9a3e-1cea-96d53b97b079, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.349013] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.351334] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.262s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.353811] env[62816]: INFO nova.compute.claims [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.374745] env[62816]: INFO nova.scheduler.client.report [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Deleted allocations for instance 0c5c5c06-0b5e-4e11-84b5-ca76828a0565 [ 1486.532064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.668545] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788193, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070215} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.669118] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1486.670102] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f390111-785b-48f4-bcf3-3be8f8792a87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.691912] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1/1e3f720c-5a6f-4e7c-aafc-b4680d9667e1.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1486.694340] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-788d9473-c129-40cd-a4fa-3dfd47a99e63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.712734] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1486.712734] env[62816]: value = "task-1788194" [ 1486.712734] env[62816]: _type = "Task" [ 1486.712734] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.720530] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788194, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.811868] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5224913f-3b07-9a3e-1cea-96d53b97b079, 'name': SearchDatastore_Task, 'duration_secs': 0.059207} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.812157] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.812413] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859/4fd2da5f-2867-4eeb-b7ab-8ffd7b096859.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1486.812806] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77e8e015-320f-4904-b538-bc603a5426e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.818695] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1486.818695] env[62816]: value = "task-1788195" [ 1486.818695] env[62816]: _type = "Task" [ 1486.818695] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.827842] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.870986] env[62816]: DEBUG nova.network.neutron [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updated VIF entry in instance network info cache for port 1110b9ce-766b-4ab4-b75f-4e0139f78297. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1486.871401] env[62816]: DEBUG nova.network.neutron [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [{"id": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "address": "fa:16:3e:d6:81:d5", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1110b9ce-76", "ovs_interfaceid": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1486.881729] env[62816]: DEBUG oslo_concurrency.lockutils [None req-abfdd586-a70a-4b2a-a385-cc27d91c7263 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "0c5c5c06-0b5e-4e11-84b5-ca76828a0565" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.972s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.008350] env[62816]: DEBUG nova.compute.manager [req-44eda81e-fcfe-4a32-a2dd-c612eee5337e req-5756301a-5070-41fb-95bf-b89b5ea2c453 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Received event network-vif-plugged-3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.008593] env[62816]: DEBUG oslo_concurrency.lockutils [req-44eda81e-fcfe-4a32-a2dd-c612eee5337e req-5756301a-5070-41fb-95bf-b89b5ea2c453 service nova] Acquiring lock "c6dc008c-6336-4271-9635-a7e0652138e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.008953] env[62816]: DEBUG oslo_concurrency.lockutils [req-44eda81e-fcfe-4a32-a2dd-c612eee5337e req-5756301a-5070-41fb-95bf-b89b5ea2c453 service nova] Lock "c6dc008c-6336-4271-9635-a7e0652138e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.009296] env[62816]: DEBUG oslo_concurrency.lockutils [req-44eda81e-fcfe-4a32-a2dd-c612eee5337e req-5756301a-5070-41fb-95bf-b89b5ea2c453 service nova] Lock "c6dc008c-6336-4271-9635-a7e0652138e0-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.010022] env[62816]: DEBUG nova.compute.manager [req-44eda81e-fcfe-4a32-a2dd-c612eee5337e req-5756301a-5070-41fb-95bf-b89b5ea2c453 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] No waiting events found dispatching network-vif-plugged-3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1487.010022] env[62816]: WARNING nova.compute.manager [req-44eda81e-fcfe-4a32-a2dd-c612eee5337e req-5756301a-5070-41fb-95bf-b89b5ea2c453 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Received unexpected event network-vif-plugged-3d72b4f0-d5b8-433f-8f1a-0813299ad226 for instance with vm_state building and task_state spawning. [ 1487.022940] env[62816]: DEBUG nova.network.neutron [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Successfully updated port: 3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1487.223104] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788194, 'name': ReconfigVM_Task, 'duration_secs': 0.472118} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.223469] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1/1e3f720c-5a6f-4e7c-aafc-b4680d9667e1.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1487.224228] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2762889e-fa69-4378-8664-f53b10a6f0af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.230656] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1487.230656] env[62816]: value = "task-1788196" [ 1487.230656] env[62816]: _type = "Task" [ 1487.230656] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.239405] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788196, 'name': Rename_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.330839] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788195, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.375274] env[62816]: DEBUG oslo_concurrency.lockutils [req-068ded90-38d3-4df4-a43c-c01d4c9b1a6b req-955b2a6e-4432-4c5f-a031-bea50e4dfe1f service nova] Releasing lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.527948] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "refresh_cache-c6dc008c-6336-4271-9635-a7e0652138e0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.527948] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquired lock "refresh_cache-c6dc008c-6336-4271-9635-a7e0652138e0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.527948] env[62816]: DEBUG nova.network.neutron [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1487.743792] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788196, 'name': Rename_Task, 'duration_secs': 0.187755} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.744141] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.744612] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-081bafb6-114b-4dfb-a659-357ccc2c6614 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.751959] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1487.751959] env[62816]: value = "task-1788197" [ 1487.751959] env[62816]: _type = "Task" [ 1487.751959] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.760587] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788197, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.830032] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599702} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.833176] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859/4fd2da5f-2867-4eeb-b7ab-8ffd7b096859.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1487.833425] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1487.834532] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3b06daf-622a-4dab-8f1e-5dbd39c64aef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.842110] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1487.842110] env[62816]: value = "task-1788198" [ 1487.842110] env[62816]: _type = "Task" [ 1487.842110] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.860292] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788198, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.870036] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1757f92c-6935-4600-8a12-63646e470b54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.877554] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c661dd3a-6226-42a2-acb2-994666e442da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.912450] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61a23234-6ee6-4d35-a484-53b358a44820 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.920250] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3450ec2-f0ac-461a-b353-8181546a662a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.934100] env[62816]: DEBUG nova.compute.provider_tree [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.074281] env[62816]: DEBUG nova.network.neutron [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1488.131574] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquiring lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.131839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.132640] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquiring lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.132953] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.133059] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.136198] env[62816]: INFO nova.compute.manager [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Terminating instance [ 1488.138241] env[62816]: DEBUG nova.compute.manager [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1488.138453] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1488.138690] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05e73078-144d-461e-bea6-a0f20ee84337 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.146624] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1488.146624] env[62816]: value = "task-1788199" [ 1488.146624] env[62816]: _type = "Task" [ 1488.146624] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.157357] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788199, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.262873] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788197, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.314194] env[62816]: DEBUG nova.network.neutron [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Updating instance_info_cache with network_info: [{"id": "3d72b4f0-d5b8-433f-8f1a-0813299ad226", "address": "fa:16:3e:a1:4f:44", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d72b4f0-d5", "ovs_interfaceid": "3d72b4f0-d5b8-433f-8f1a-0813299ad226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.351458] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788198, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065365} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.352080] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1488.352873] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03f290fe-655e-404f-bd89-8025fff1c456 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.379108] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859/4fd2da5f-2867-4eeb-b7ab-8ffd7b096859.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1488.380027] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1935fbd2-889d-4234-9f86-e22a0864e5ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.405227] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1488.405227] env[62816]: value = "task-1788200" [ 1488.405227] env[62816]: _type = "Task" [ 1488.405227] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.417503] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788200, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.439083] env[62816]: DEBUG nova.scheduler.client.report [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1488.657270] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788199, 'name': PowerOffVM_Task, 'duration_secs': 0.290292} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.657576] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1488.658074] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1488.658074] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-370909', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'name': 'volume-76167f57-102e-45d9-8256-5434bbce481e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1914aaa-1f3d-48b7-a6d2-ceea16dc786a', 'attached_at': '', 'detached_at': '', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'serial': '76167f57-102e-45d9-8256-5434bbce481e'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1488.658745] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19058a83-4b23-475e-94d2-0261ecb95ddd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.684378] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc6b2b8-dbc4-4706-a921-cb1aa7e2e4ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.689083] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27080c8-a2b5-4739-b5ac-0c7cfe54b562 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.709707] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16418e22-8905-4eba-95ea-53ed844bce57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.725438] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] The volume has not been displaced from its original location: [datastore1] volume-76167f57-102e-45d9-8256-5434bbce481e/volume-76167f57-102e-45d9-8256-5434bbce481e.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1488.730636] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Reconfiguring VM instance instance-00000010 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1488.730989] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9210192b-ca0e-405d-a423-dc9755ddb4e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.748754] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1488.748754] env[62816]: value = "task-1788201" [ 1488.748754] env[62816]: _type = "Task" [ 1488.748754] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.759856] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788201, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.764684] env[62816]: DEBUG oslo_vmware.api [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788197, 'name': PowerOnVM_Task, 'duration_secs': 0.963613} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.764946] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1488.765183] env[62816]: INFO nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Took 10.29 seconds to spawn the instance on the hypervisor. 
[ 1488.765377] env[62816]: DEBUG nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1488.766118] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efbbd5c-06d0-4d53-ae71-8f4ad05fd608 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.822274] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Releasing lock "refresh_cache-c6dc008c-6336-4271-9635-a7e0652138e0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.822614] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Instance network_info: |[{"id": "3d72b4f0-d5b8-433f-8f1a-0813299ad226", "address": "fa:16:3e:a1:4f:44", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d72b4f0-d5", "ovs_interfaceid": "3d72b4f0-d5b8-433f-8f1a-0813299ad226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1488.823315] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:4f:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d72b4f0-d5b8-433f-8f1a-0813299ad226', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1488.830665] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Creating folder: Project (8b34c8d2bcbf49d4a4ac51a454689423). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1488.830991] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cab035ff-b2f0-4c51-8e90-f8e9025bc88a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.842640] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Created folder: Project (8b34c8d2bcbf49d4a4ac51a454689423) in parent group-v370905. [ 1488.842891] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Creating folder: Instances. Parent ref: group-v371001. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1488.843295] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1b7b721-185a-4afb-890f-2992faf0fc72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.854153] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Created folder: Instances in parent group-v371001. [ 1488.854153] env[62816]: DEBUG oslo.service.loopingcall [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1488.854153] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1488.854153] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce55487b-d0bb-4c47-ac21-59fee64da285 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.875153] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1488.875153] env[62816]: value = "task-1788204" [ 1488.875153] env[62816]: _type = "Task" [ 1488.875153] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.884077] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788204, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.915135] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788200, 'name': ReconfigVM_Task, 'duration_secs': 0.283822} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.915300] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859/4fd2da5f-2867-4eeb-b7ab-8ffd7b096859.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1488.915906] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9fdf543b-0933-4473-b95f-47586c67690b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.923044] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1488.923044] env[62816]: value = "task-1788205" [ 1488.923044] env[62816]: _type = "Task" [ 1488.923044] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.929360] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788205, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.949147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.597s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.950104] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1488.953207] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.152s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.954723] env[62816]: INFO nova.compute.claims [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1489.035093] env[62816]: DEBUG nova.compute.manager [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Received event network-changed-3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.035297] env[62816]: DEBUG nova.compute.manager [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Refreshing instance network info cache due to event network-changed-3d72b4f0-d5b8-433f-8f1a-0813299ad226. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1489.035519] env[62816]: DEBUG oslo_concurrency.lockutils [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] Acquiring lock "refresh_cache-c6dc008c-6336-4271-9635-a7e0652138e0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.035703] env[62816]: DEBUG oslo_concurrency.lockutils [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] Acquired lock "refresh_cache-c6dc008c-6336-4271-9635-a7e0652138e0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.035956] env[62816]: DEBUG nova.network.neutron [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Refreshing network info cache for port 3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1489.258994] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788201, 'name': ReconfigVM_Task, 'duration_secs': 0.149676} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.260023] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Reconfigured VM instance instance-00000010 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1489.265024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad82a374-62ac-4786-a6bf-4afdc622dbe1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.282967] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1489.282967] env[62816]: value = "task-1788206" [ 1489.282967] env[62816]: _type = "Task" [ 1489.282967] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.286859] env[62816]: INFO nova.compute.manager [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Took 46.58 seconds to build instance. [ 1489.294414] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788206, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.385370] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788204, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.431148] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788205, 'name': Rename_Task, 'duration_secs': 0.141667} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.431492] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1489.431750] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-081a45c5-64f5-4cff-9833-d97d629af479 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.437524] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1489.437524] env[62816]: value = "task-1788207" [ 1489.437524] env[62816]: _type = "Task" [ 1489.437524] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.446095] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.460716] env[62816]: DEBUG nova.compute.utils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1489.464505] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1489.464687] env[62816]: DEBUG nova.network.neutron [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1489.530669] env[62816]: DEBUG nova.policy [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0af00d6302f455988732c7568cf84c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '005f772e517340a0acaac0d61b8262df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1489.789182] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bb75af81-9c0c-4d74-8b0a-b3e976b97b49 tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.201s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1489.803336] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788206, 'name': ReconfigVM_Task, 'duration_secs': 0.186905} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.803776] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-370909', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'name': 'volume-76167f57-102e-45d9-8256-5434bbce481e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'f1914aaa-1f3d-48b7-a6d2-ceea16dc786a', 'attached_at': '', 'detached_at': '', 'volume_id': '76167f57-102e-45d9-8256-5434bbce481e', 'serial': '76167f57-102e-45d9-8256-5434bbce481e'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1489.804158] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1489.805060] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27fc62b-f20c-415d-8166-9c05f5c4dba4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.813836] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1489.814169] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4169b554-7a36-4c99-a2a3-4521b0c87e20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.875406] env[62816]: DEBUG nova.network.neutron [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Updated VIF entry in instance network info cache for port 3d72b4f0-d5b8-433f-8f1a-0813299ad226. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1489.875773] env[62816]: DEBUG nova.network.neutron [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Updating instance_info_cache with network_info: [{"id": "3d72b4f0-d5b8-433f-8f1a-0813299ad226", "address": "fa:16:3e:a1:4f:44", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.202", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d72b4f0-d5", "ovs_interfaceid": "3d72b4f0-d5b8-433f-8f1a-0813299ad226", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.883373] env[62816]: DEBUG nova.network.neutron [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Successfully created port: 6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1489.890804] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788204, 'name': CreateVM_Task, 'duration_secs': 0.639052} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.892044] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1489.892456] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1489.892582] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1489.892754] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Deleting the datastore file [datastore1] f1914aaa-1f3d-48b7-a6d2-ceea16dc786a {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1489.894568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1489.894739] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1489.895075] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1489.895358] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf043254-12de-4c9e-bdd1-09f300867171 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.897532] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3aa10d0-7cf1-4bf9-a62b-5c66187e1154 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.902232] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1489.902232] env[62816]: value = 
"session[52166549-a417-fee9-199e-38636bfc0ddd]52223338-1ab4-b3f7-6898-f4277b4e7046" [ 1489.902232] env[62816]: _type = "Task" [ 1489.902232] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.903547] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for the task: (returnval){ [ 1489.903547] env[62816]: value = "task-1788209" [ 1489.903547] env[62816]: _type = "Task" [ 1489.903547] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.917769] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52223338-1ab4-b3f7-6898-f4277b4e7046, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.918508] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788209, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.948067] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788207, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.966070] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1490.307066] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1490.384241] env[62816]: DEBUG oslo_concurrency.lockutils [req-7daa3326-885f-41a9-af27-b998634009d4 req-79220493-0888-484c-bdfa-2189e5437726 service nova] Releasing lock "refresh_cache-c6dc008c-6336-4271-9635-a7e0652138e0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.403546] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.403848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.420749] env[62816]: DEBUG oslo_vmware.api [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Task: {'id': task-1788209, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.106501} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.425645] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1490.425976] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1490.426225] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1490.426502] env[62816]: INFO nova.compute.manager [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Took 2.29 seconds to destroy the instance on the hypervisor. [ 1490.426783] env[62816]: DEBUG oslo.service.loopingcall [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1490.427384] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52223338-1ab4-b3f7-6898-f4277b4e7046, 'name': SearchDatastore_Task, 'duration_secs': 0.011907} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.430374] env[62816]: DEBUG nova.compute.manager [-] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1490.430548] env[62816]: DEBUG nova.network.neutron [-] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1490.432778] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.433098] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1490.433389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.433575] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.433854] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1490.434695] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ecd85548-f858-4a12-a083-c07ac09e7aa4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.445062] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1490.445268] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1490.448757] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8178d4ab-0453-4fd5-bee9-5489de885f4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.451320] env[62816]: DEBUG oslo_vmware.api [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788207, 'name': PowerOnVM_Task, 'duration_secs': 0.754414} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.451618] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1490.451875] env[62816]: INFO nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Took 8.13 seconds to spawn the instance on the hypervisor. [ 1490.452094] env[62816]: DEBUG nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1490.455622] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c719b0-181d-4c87-a84b-dc1817cf0d3d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.460090] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1490.460090] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5241e69d-b3a7-02c5-d4ef-ad0e2bf21ebf" [ 1490.460090] env[62816]: _type = "Task" [ 1490.460090] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.476170] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5241e69d-b3a7-02c5-d4ef-ad0e2bf21ebf, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.480517] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e867fd8-e4b9-452a-8eee-086e936d8cdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.487440] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1490.487440] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d08b86-cf87-34eb-1c89-688623a74e23" [ 1490.487440] env[62816]: _type = "Task" [ 1490.487440] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.498449] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d08b86-cf87-34eb-1c89-688623a74e23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.565324] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed24268e-9092-4292-8aa5-f50bffc61a13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.573208] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e227e25e-42fe-4dd2-92a1-96dc06dc0852 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.604160] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c922757c-628f-4aca-a65e-1c93693a9d9b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.611985] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b796fb-50dc-44f0-b04b-65700e2e3c87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.626757] env[62816]: DEBUG nova.compute.provider_tree [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1490.796409] env[62816]: DEBUG nova.compute.manager [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Received event network-changed-89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.796409] env[62816]: DEBUG nova.compute.manager [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Refreshing instance network info cache due to event network-changed-89c39d07-acd3-4f92-a168-921d07739ac6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1490.796409] env[62816]: DEBUG oslo_concurrency.lockutils [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] Acquiring lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.796409] env[62816]: DEBUG oslo_concurrency.lockutils [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] Acquired lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.796409] env[62816]: DEBUG nova.network.neutron [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Refreshing network info cache for port 89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.830075] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.982525] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1490.985447] env[62816]: INFO nova.compute.manager [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Took 46.50 seconds to build instance. [ 1490.998270] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d08b86-cf87-34eb-1c89-688623a74e23, 'name': SearchDatastore_Task, 'duration_secs': 0.018735} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.998708] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.999018] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c6dc008c-6336-4271-9635-a7e0652138e0/c6dc008c-6336-4271-9635-a7e0652138e0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1490.999873] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c97bca65-f76c-4c21-8e94-6ed28ec9bf75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.008640] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1491.008640] env[62816]: value = "task-1788210" [ 1491.008640] env[62816]: _type = "Task" [ 1491.008640] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.022767] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788210, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.025165] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1491.025396] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1491.025555] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1491.025736] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1491.025881] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1491.026039] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1491.026293] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1491.026507] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1491.026691] 
env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1491.026854] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1491.027040] env[62816]: DEBUG nova.virt.hardware [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1491.027837] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42ad595b-01b2-4ec9-9d13-00ae28383a32 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.036231] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347c489a-aa41-481b-bf2b-f89ece1c9d70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.130111] env[62816]: DEBUG nova.scheduler.client.report [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1491.490304] env[62816]: DEBUG oslo_concurrency.lockutils [None req-69ba5874-ea09-4097-b8de-aa2ca2a87134 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.032s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.522975] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788210, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.637683] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.638234] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1491.644857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.053s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.644857] env[62816]: INFO nova.compute.claims [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1491.817966] env[62816]: DEBUG nova.network.neutron [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Updated VIF entry in instance network info cache for port 89c39d07-acd3-4f92-a168-921d07739ac6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.819017] env[62816]: DEBUG nova.network.neutron [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Updating instance_info_cache with network_info: [{"id": "89c39d07-acd3-4f92-a168-921d07739ac6", "address": "fa:16:3e:70:eb:28", "network": {"id": "50096a05-9b12-4e0c-99c7-a975626057ba", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-210536879-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.164", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6456bdfcb5a44d3a8514fe13e8af920", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "25f42474-5594-4733-a681-6c69f4afb946", "external-id": "nsx-vlan-transportzone-453", "segmentation_id": 453, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89c39d07-ac", "ovs_interfaceid": "89c39d07-acd3-4f92-a168-921d07739ac6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.860749] env[62816]: DEBUG nova.network.neutron [-] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1491.861957] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.862195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.888373] env[62816]: DEBUG nova.network.neutron [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Successfully updated port: 6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.992274] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1492.028434] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788210, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521979} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.028706] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c6dc008c-6336-4271-9635-a7e0652138e0/c6dc008c-6336-4271-9635-a7e0652138e0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1492.028913] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1492.030219] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fad47d5-2be6-4d8b-8520-32ab77845209 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.035955] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1492.035955] env[62816]: value = "task-1788211" [ 1492.035955] env[62816]: _type = "Task" [ 1492.035955] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.046061] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788211, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.157545] env[62816]: DEBUG nova.compute.utils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1492.160581] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1492.160673] env[62816]: DEBUG nova.network.neutron [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1492.181082] env[62816]: DEBUG nova.compute.manager [req-67f122b1-8039-4a69-acae-dd1ba575d517 req-7728abcd-d1fb-40de-be54-61b2f87fcb7a service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Received event network-vif-plugged-6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.181322] env[62816]: DEBUG oslo_concurrency.lockutils [req-67f122b1-8039-4a69-acae-dd1ba575d517 req-7728abcd-d1fb-40de-be54-61b2f87fcb7a service nova] Acquiring lock "b409568f-6e04-4218-8a7b-1bbf785115c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.181536] env[62816]: DEBUG oslo_concurrency.lockutils [req-67f122b1-8039-4a69-acae-dd1ba575d517 req-7728abcd-d1fb-40de-be54-61b2f87fcb7a service nova] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.181704] env[62816]: DEBUG oslo_concurrency.lockutils [req-67f122b1-8039-4a69-acae-dd1ba575d517 req-7728abcd-d1fb-40de-be54-61b2f87fcb7a service nova] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.181888] env[62816]: DEBUG nova.compute.manager [req-67f122b1-8039-4a69-acae-dd1ba575d517 req-7728abcd-d1fb-40de-be54-61b2f87fcb7a service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] No waiting events found dispatching network-vif-plugged-6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.182055] env[62816]: WARNING nova.compute.manager [req-67f122b1-8039-4a69-acae-dd1ba575d517 req-7728abcd-d1fb-40de-be54-61b2f87fcb7a service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Received unexpected event network-vif-plugged-6069e840-7095-4621-bf07-1d83bb93ce9d for instance with vm_state building and task_state spawning. 
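The recurring "Task: {'id': task-1788210, ...} progress is N%" entries above come from the driver polling a long-running vCenter task until it completes (wait_for_task / _poll_task). As a rough, self-contained sketch of that poll-until-done pattern, using only the standard library — the fetch_task_info helper and the dict shape it returns are hypothetical stand-ins, not the oslo.vmware API:

    import time

    class TaskTimeout(Exception):
        pass

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
        # Poll the task until it reports success or error, logging progress on each pass.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            # fetch_task_info is assumed to return e.g. {'state': 'running', 'progress': 89}
            info = fetch_task_info(task_id)
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError("task %s failed: %s" % (task_id, info.get('error')))
            print("Task %s progress is %s%%" % (task_id, info.get('progress', 0)))
            time.sleep(interval)
        raise TaskTimeout("task %s did not complete within %ss" % (task_id, timeout))

The fixed sleep interval is a simplification; the log's varying poll gaps suggest the real loop is driven by a periodic timer rather than a plain sleep.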
[ 1492.216312] env[62816]: DEBUG nova.policy [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9b48919c724430496c5d846d7300f52', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c6b942889914783a95c2abb080137a9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1492.322667] env[62816]: DEBUG oslo_concurrency.lockutils [req-fd41a982-95ac-42a9-87f4-e3251465bfe8 req-4c07e9a3-8334-4e68-b8f5-1c70d693b9b4 service nova] Releasing lock "refresh_cache-1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.361235] env[62816]: INFO nova.compute.manager [-] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Took 1.93 seconds to deallocate network for instance. [ 1492.364585] env[62816]: DEBUG nova.compute.utils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1492.390596] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "refresh_cache-b409568f-6e04-4218-8a7b-1bbf785115c3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.390750] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "refresh_cache-b409568f-6e04-4218-8a7b-1bbf785115c3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.391031] env[62816]: DEBUG nova.network.neutron [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.477099] env[62816]: DEBUG nova.network.neutron [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Successfully created port: 64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1492.516085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.547522] env[62816]: DEBUG 
oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788211, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069248} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.547522] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1492.547861] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4f519e-74d2-40c4-b7f7-ae31587f9578 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.572111] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] c6dc008c-6336-4271-9635-a7e0652138e0/c6dc008c-6336-4271-9635-a7e0652138e0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1492.572585] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-316a822b-3046-4a1d-aea5-39f0d841a6f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.606023] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1492.606023] env[62816]: value = "task-1788212" [ 1492.606023] env[62816]: _type = "Task" [ 1492.606023] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.615155] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788212, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.661513] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1492.814030] env[62816]: DEBUG nova.compute.manager [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.815392] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a05dc6dc-65b9-408e-aa57-44242a987ce5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.867260] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.925793] env[62816]: DEBUG nova.network.neutron [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1492.937208] env[62816]: INFO nova.compute.manager [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Took 0.57 seconds to detach 1 volumes for instance. [ 1492.942342] env[62816]: DEBUG nova.compute.manager [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Deleting volume: 76167f57-102e-45d9-8256-5434bbce481e {{(pid=62816) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1493.081606] env[62816]: DEBUG nova.compute.manager [req-be60f856-671a-4d42-ac32-32f161704476 req-f3c6dbfe-3409-40b8-9411-9f764030cf70 service nova] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Received event network-vif-deleted-6b060db8-dee6-465b-8fb0-980f49a5e433 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.116992] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788212, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.139011] env[62816]: DEBUG nova.network.neutron [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Updating instance_info_cache with network_info: [{"id": "6069e840-7095-4621-bf07-1d83bb93ce9d", "address": "fa:16:3e:fb:8e:8b", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6069e840-70", "ovs_interfaceid": "6069e840-7095-4621-bf07-1d83bb93ce9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.194345] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d49073-8b1e-4e46-b409-922550a7d0ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.204037] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c05ff2-b0d8-4d73-a7e6-ddc1b9db3a7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.239525] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a67fe1-4c47-4d87-b717-0cef1ad81c3d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.247973] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb018d2-db25-4d86-a4b9-f03347265435 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.262732] env[62816]: DEBUG nova.compute.provider_tree [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1493.330280] env[62816]: INFO nova.compute.manager [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] instance snapshotting [ 1493.335086] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b41bfd51-e8d0-43bd-98b4-425336b748ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.357903] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abfd485-8e56-4d81-8f51-f79b96e1fb8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.503981] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.617762] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788212, 'name': ReconfigVM_Task, 'duration_secs': 0.584439} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1493.618099] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Reconfigured VM instance instance-00000021 to attach disk [datastore1] c6dc008c-6336-4271-9635-a7e0652138e0/c6dc008c-6336-4271-9635-a7e0652138e0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1493.619115] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-280edc8d-171a-4ebe-a9d6-b4d02fa62754 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.626776] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1493.626776] env[62816]: value = "task-1788214" [ 1493.626776] env[62816]: _type = "Task" [ 1493.626776] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.636046] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788214, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.644782] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "refresh_cache-b409568f-6e04-4218-8a7b-1bbf785115c3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.645146] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Instance network_info: |[{"id": "6069e840-7095-4621-bf07-1d83bb93ce9d", "address": "fa:16:3e:fb:8e:8b", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6069e840-70", "ovs_interfaceid": "6069e840-7095-4621-bf07-1d83bb93ce9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1493.645596] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:8e:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6069e840-7095-4621-bf07-1d83bb93ce9d', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.654183] env[62816]: DEBUG oslo.service.loopingcall [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.654441] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.654676] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f8fd9fd-547b-492e-8e90-857f0e85805f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.672114] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1493.681820] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.681820] env[62816]: value = "task-1788215" [ 1493.681820] env[62816]: _type = "Task" [ 1493.681820] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.690474] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788215, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.704505] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1493.704795] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1493.704933] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1493.705133] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1493.705283] env[62816]: DEBUG 
nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1493.705429] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1493.705645] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1493.705805] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1493.705971] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1493.706153] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1493.706333] env[62816]: DEBUG nova.virt.hardware [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1493.707231] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3d857b-c593-4c7e-b61d-0d5264dce6ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.718545] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093c2d90-2664-49a9-bd1b-4d16197ec2f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.766352] env[62816]: DEBUG nova.scheduler.client.report [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1493.869487] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1493.869917] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3d74ba44-aba5-4b20-97b5-3fface7ddfd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.879170] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1493.879170] env[62816]: value = "task-1788216" [ 1493.879170] env[62816]: _type = "Task" [ 1493.879170] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.890692] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788216, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.956515] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1493.956765] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.956839] env[62816]: INFO nova.compute.manager [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Attaching volume f19290f7-d256-4b28-a0db-14a599c23011 to /dev/sdb [ 1493.990531] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7630eafa-d848-4e1f-9861-ffdce7ec4f94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.998858] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d3c2f0-91b9-458d-a6f2-e68edc66ffcb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.013423] env[62816]: DEBUG nova.virt.block_device [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c 
tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updating existing volume attachment record: cfa96a9c-ffca-4b3a-b16d-a796d8e6fdab {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1494.082826] env[62816]: DEBUG nova.network.neutron [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Successfully updated port: 64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1494.141420] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788214, 'name': Rename_Task, 'duration_secs': 0.285318} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.141593] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1494.142252] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48f443d3-2030-46c7-9f6f-b62f55703c5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.149465] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1494.149465] env[62816]: value = "task-1788217" [ 1494.149465] env[62816]: _type = "Task" [ 1494.149465] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.161454] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788217, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.192440] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788215, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.217552] env[62816]: DEBUG nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Received event network-changed-6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1494.217895] env[62816]: DEBUG nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Refreshing instance network info cache due to event network-changed-6069e840-7095-4621-bf07-1d83bb93ce9d. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1494.218221] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Acquiring lock "refresh_cache-b409568f-6e04-4218-8a7b-1bbf785115c3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.218443] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Acquired lock "refresh_cache-b409568f-6e04-4218-8a7b-1bbf785115c3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.218792] env[62816]: DEBUG nova.network.neutron [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Refreshing network info cache for port 6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.272678] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.273360] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1494.277369] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.035s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.277522] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1494.277745] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1494.278186] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.985s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1494.280284] env[62816]: INFO nova.compute.claims [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1494.285029] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdc7abe-1bfd-4742-8ee9-420701f42820 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.295219] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334c332e-7cea-4e2a-83b6-626aa57c6e31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.312658] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b473301-75de-4e34-b962-4752fc314ee1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.320814] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bab5e9-c689-461e-942a-c37be8d9b90c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.355786] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179744MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1494.355972] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1494.391653] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788216, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.587666] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.587879] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.588096] env[62816]: DEBUG nova.network.neutron [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1494.660134] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788217, 'name': PowerOnVM_Task} progress is 96%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.694069] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788215, 'name': CreateVM_Task, 'duration_secs': 0.512275} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.694350] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.695474] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.695559] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.696073] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.696444] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79a776ca-bdcc-4f7b-9444-da2474cc5e62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.702333] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1494.702333] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fb3a62-23df-63f9-99f1-aac3eac0d31e" [ 1494.702333] env[62816]: _type = "Task" [ 1494.702333] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.710203] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fb3a62-23df-63f9-99f1-aac3eac0d31e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.785381] env[62816]: DEBUG nova.compute.utils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1494.788982] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1494.789184] env[62816]: DEBUG nova.network.neutron [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1494.899022] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788216, 'name': CreateSnapshot_Task, 'duration_secs': 0.643895} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.899022] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1494.899413] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d08f72b9-8917-4389-9264-9ec7cdd7aadd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.902713] env[62816]: DEBUG nova.policy [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'caa3fab70d854a7b8134ca935692c306', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f4b3d1d951945a7a7f808588e3c7c93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1495.075397] env[62816]: DEBUG nova.network.neutron [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Updated VIF entry in instance network info cache for port 6069e840-7095-4621-bf07-1d83bb93ce9d. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1495.075619] env[62816]: DEBUG nova.network.neutron [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Updating instance_info_cache with network_info: [{"id": "6069e840-7095-4621-bf07-1d83bb93ce9d", "address": "fa:16:3e:fb:8e:8b", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6069e840-70", "ovs_interfaceid": "6069e840-7095-4621-bf07-1d83bb93ce9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.142783] env[62816]: DEBUG nova.network.neutron [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1495.161598] env[62816]: DEBUG oslo_vmware.api [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788217, 'name': PowerOnVM_Task, 'duration_secs': 0.647907} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.162026] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1495.162341] env[62816]: INFO nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Took 8.98 seconds to spawn the instance on the hypervisor. 
[ 1495.162645] env[62816]: DEBUG nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.163550] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a521e5e-8210-4c92-a195-dbd7a8868cda {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.214719] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fb3a62-23df-63f9-99f1-aac3eac0d31e, 'name': SearchDatastore_Task, 'duration_secs': 0.010943} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.215327] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.215709] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1495.216101] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.216402] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.216737] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1495.217123] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64599014-0af1-4391-84f7-43bb318305ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.228047] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 
tempest-ServersNegativeTestJSON-2121561785-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1495.228047] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1495.229483] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4116e12-5b8d-4f2b-b4b9-2d55ca6d682e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.239835] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1495.239835] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225657d-e9fc-4a3a-e6fe-466431fac8e4" [ 1495.239835] env[62816]: _type = "Task" [ 1495.239835] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.246888] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5225657d-e9fc-4a3a-e6fe-466431fac8e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.292482] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1495.422100] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1495.425242] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2b98ace4-093e-46dd-9ac8-a0d2c2e2941b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.431741] env[62816]: DEBUG nova.network.neutron [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [{"id": "64790bf9-4e84-424e-a85d-819c0d6cade8", "address": "fa:16:3e:51:d0:5a", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64790bf9-4e", "ovs_interfaceid": "64790bf9-4e84-424e-a85d-819c0d6cade8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1495.437030] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1495.437030] env[62816]: value = "task-1788221" [ 1495.437030] env[62816]: _type = "Task" [ 1495.437030] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.449765] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788221, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.504102] env[62816]: DEBUG nova.network.neutron [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Successfully created port: c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1495.515963] env[62816]: DEBUG nova.compute.manager [None req-0990a20b-4ecd-48c9-a618-16b9cfd5a36f tempest-ServerDiagnosticsTest-2005245349 tempest-ServerDiagnosticsTest-2005245349-project-admin] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.516616] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ea45e1-5bd0-4934-86cb-b941dba549b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.524324] env[62816]: INFO nova.compute.manager [None req-0990a20b-4ecd-48c9-a618-16b9cfd5a36f tempest-ServerDiagnosticsTest-2005245349 tempest-ServerDiagnosticsTest-2005245349-project-admin] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Retrieving diagnostics [ 1495.527738] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e90aadb-c5d1-4d14-8cc1-ef0f9a5f2ac3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.578510] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Releasing lock "refresh_cache-b409568f-6e04-4218-8a7b-1bbf785115c3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.578773] env[62816]: DEBUG nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Received event network-vif-plugged-64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.579082] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Acquiring lock "d16a99df-f092-4d56-9730-852883bbdb70-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.579394] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Lock "d16a99df-f092-4d56-9730-852883bbdb70-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.579644] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Lock "d16a99df-f092-4d56-9730-852883bbdb70-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.579900] env[62816]: DEBUG nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] No waiting events found dispatching network-vif-plugged-64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1495.580231] env[62816]: WARNING nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Received unexpected event network-vif-plugged-64790bf9-4e84-424e-a85d-819c0d6cade8 for instance with vm_state building and task_state spawning. [ 1495.580461] env[62816]: DEBUG nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Received event network-changed-64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.580648] env[62816]: DEBUG nova.compute.manager [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Refreshing instance network info cache due to event network-changed-64790bf9-4e84-424e-a85d-819c0d6cade8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1495.580901] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Acquiring lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.687153] env[62816]: INFO nova.compute.manager [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Took 50.11 seconds to build instance. [ 1495.749748] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5225657d-e9fc-4a3a-e6fe-466431fac8e4, 'name': SearchDatastore_Task, 'duration_secs': 0.011206} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.750847] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-925ce7f7-e281-45ec-bed1-c8fe3abc020b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.755921] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1495.755921] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5275f036-3066-8f2c-a39c-f64fc538568b" [ 1495.755921] env[62816]: _type = "Task" [ 1495.755921] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.764798] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5275f036-3066-8f2c-a39c-f64fc538568b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.844018] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c68e9e-a450-4c49-b3c6-dc2842b98561 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.852089] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30974859-c830-4b99-a38c-68fad51953d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.886140] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d322ced3-6b1b-4d65-aae7-c5c3682c10c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.893927] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-695984fc-b872-4e9f-a2ab-2cf33ff71cec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.908699] env[62816]: DEBUG nova.compute.provider_tree [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1495.932895] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.933319] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Instance network_info: |[{"id": "64790bf9-4e84-424e-a85d-819c0d6cade8", "address": "fa:16:3e:51:d0:5a", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", 
"segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64790bf9-4e", "ovs_interfaceid": "64790bf9-4e84-424e-a85d-819c0d6cade8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1495.933747] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Acquired lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.933936] env[62816]: DEBUG nova.network.neutron [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Refreshing network info cache for port 64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.935027] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:d0:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64790bf9-4e84-424e-a85d-819c0d6cade8', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1495.942310] env[62816]: DEBUG oslo.service.loopingcall [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1495.946189] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1495.949824] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fc0dcf5-6841-45cb-bbfa-4648780a52b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.970239] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788221, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.971644] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1495.971644] env[62816]: value = "task-1788222" [ 1495.971644] env[62816]: _type = "Task" [ 1495.971644] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.979762] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788222, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.189326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5679f42e-76ed-4ac9-86c3-f4c073380038 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "c6dc008c-6336-4271-9635-a7e0652138e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.869s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.227028] env[62816]: DEBUG nova.network.neutron [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updated VIF entry in instance network info cache for port 64790bf9-4e84-424e-a85d-819c0d6cade8. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1496.227442] env[62816]: DEBUG nova.network.neutron [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [{"id": "64790bf9-4e84-424e-a85d-819c0d6cade8", "address": "fa:16:3e:51:d0:5a", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64790bf9-4e", "ovs_interfaceid": "64790bf9-4e84-424e-a85d-819c0d6cade8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.270960] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5275f036-3066-8f2c-a39c-f64fc538568b, 'name': SearchDatastore_Task, 'duration_secs': 0.009192} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.271394] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.271962] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b409568f-6e04-4218-8a7b-1bbf785115c3/b409568f-6e04-4218-8a7b-1bbf785115c3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1496.272261] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eabee466-6b28-44e1-8402-0b79132ffa4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.282240] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1496.282240] env[62816]: value = "task-1788223" [ 1496.282240] env[62816]: _type = "Task" [ 1496.282240] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.301278] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.306610] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1496.334672] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1496.334950] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1496.335136] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1496.335327] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1496.335475] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1496.335623] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1496.335859] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1496.335993] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1496.336208] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1496.336389] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1496.336577] env[62816]: DEBUG nova.virt.hardware [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1496.337622] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804b27fa-8c8a-4f35-b11d-f95b31e64a3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.346192] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414cb710-392d-44b0-83d8-203efdb9502f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.411710] env[62816]: DEBUG nova.scheduler.client.report [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1496.456454] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788221, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.481433] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788222, 'name': CreateVM_Task, 'duration_secs': 0.373467} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.481614] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1496.482488] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1496.482609] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.483140] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1496.483438] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a1815ac-633e-46a2-8151-594cc0575332 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.488577] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1496.488577] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52456c84-0a10-68a1-23ca-f4f320937287" [ 1496.488577] env[62816]: _type = "Task" [ 1496.488577] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.498598] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52456c84-0a10-68a1-23ca-f4f320937287, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.693237] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1496.731685] env[62816]: DEBUG oslo_concurrency.lockutils [req-a32559fd-970d-46b4-b8db-1d66c3bb48ba req-229a33de-19a5-4445-8cfe-02bc09f7c587 service nova] Releasing lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.795651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "c6dc008c-6336-4271-9635-a7e0652138e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.795858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "c6dc008c-6336-4271-9635-a7e0652138e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.796082] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "c6dc008c-6336-4271-9635-a7e0652138e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1496.797466] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "c6dc008c-6336-4271-9635-a7e0652138e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.797466] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "c6dc008c-6336-4271-9635-a7e0652138e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.798436] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788223, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.798991] env[62816]: INFO nova.compute.manager [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Terminating instance [ 1496.801101] env[62816]: DEBUG nova.compute.manager [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1496.801468] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1496.802354] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0377f1-3738-4e54-bcb6-f3b5ad9b0ab4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.810339] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1496.811241] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3c530c2-d3ca-4e6b-bf2e-3f935bc50e85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.817482] env[62816]: DEBUG oslo_vmware.api [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1496.817482] env[62816]: value = "task-1788225" [ 1496.817482] env[62816]: _type = "Task" [ 1496.817482] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.827248] env[62816]: DEBUG oslo_vmware.api [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.916771] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1496.917535] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1496.921357] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.403s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1496.921765] env[62816]: DEBUG nova.objects.instance [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lazy-loading 'resources' on Instance uuid 128bd207-a483-4b38-9fd4-4fb996ce1d0d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1496.957300] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788221, 'name': CloneVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.000561] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52456c84-0a10-68a1-23ca-f4f320937287, 'name': SearchDatastore_Task, 'duration_secs': 0.076808} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.000797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.001068] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1497.001326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.001478] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1497.001658] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 
tempest-SecurityGroupsTestJSON-1587406018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1497.001960] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59161b5b-3d15-45c7-9a18-872bf7d51e48 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.011344] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1497.011534] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1497.012314] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9af1588a-1ed7-4093-8be2-e9fef3f472d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.018157] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1497.018157] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e1a066-fa79-202f-e429-ee013c434e71" [ 1497.018157] env[62816]: _type = "Task" [ 1497.018157] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.027288] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e1a066-fa79-202f-e429-ee013c434e71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.086189] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "f6ddaab3-d420-4ee4-bf75-486228826635" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.086690] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "f6ddaab3-d420-4ee4-bf75-486228826635" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.087692] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "f6ddaab3-d420-4ee4-bf75-486228826635-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.087692] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "f6ddaab3-d420-4ee4-bf75-486228826635-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.087692] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "f6ddaab3-d420-4ee4-bf75-486228826635-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.092020] env[62816]: INFO nova.compute.manager [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Terminating instance [ 1497.092020] env[62816]: DEBUG nova.compute.manager [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1497.092020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1497.092945] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93500636-1a45-458e-9172-879b207684d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.105472] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1497.106527] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a4ce711-7a31-493d-b1d4-49f9fbeaf74c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.114604] env[62816]: DEBUG oslo_vmware.api [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1497.114604] env[62816]: value = "task-1788226" [ 1497.114604] env[62816]: _type = "Task" [ 1497.114604] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.124985] env[62816]: DEBUG oslo_vmware.api [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788226, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.225239] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.296831] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788223, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654487} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.296831] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b409568f-6e04-4218-8a7b-1bbf785115c3/b409568f-6e04-4218-8a7b-1bbf785115c3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1497.296831] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1497.296831] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3cc889b2-a5e7-43b4-8f1c-a541bd48f79c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.304219] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1497.304219] env[62816]: value = "task-1788227" [ 1497.304219] env[62816]: _type = "Task" [ 1497.304219] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.313335] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788227, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.327424] env[62816]: DEBUG oslo_vmware.api [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788225, 'name': PowerOffVM_Task, 'duration_secs': 0.241011} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.327745] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1497.327958] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1497.328236] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c5983b1-cb40-48cd-af98-de97022acf08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.421245] env[62816]: DEBUG nova.compute.manager [req-8f2f14ae-cc3f-4ad3-bfea-018f682ce4ef req-739b75bd-31a7-499d-a49b-834677119af7 service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Received event network-vif-plugged-c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.421245] env[62816]: DEBUG oslo_concurrency.lockutils [req-8f2f14ae-cc3f-4ad3-bfea-018f682ce4ef req-739b75bd-31a7-499d-a49b-834677119af7 service nova] Acquiring lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.425560] env[62816]: DEBUG oslo_concurrency.lockutils [req-8f2f14ae-cc3f-4ad3-bfea-018f682ce4ef req-739b75bd-31a7-499d-a49b-834677119af7 service nova] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.004s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.425560] env[62816]: DEBUG oslo_concurrency.lockutils [req-8f2f14ae-cc3f-4ad3-bfea-018f682ce4ef req-739b75bd-31a7-499d-a49b-834677119af7 service nova] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.425560] env[62816]: DEBUG nova.compute.manager [req-8f2f14ae-cc3f-4ad3-bfea-018f682ce4ef req-739b75bd-31a7-499d-a49b-834677119af7 service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] No waiting events found dispatching network-vif-plugged-c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1497.425560] env[62816]: WARNING nova.compute.manager [req-8f2f14ae-cc3f-4ad3-bfea-018f682ce4ef req-739b75bd-31a7-499d-a49b-834677119af7 service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Received unexpected event network-vif-plugged-c9c1cb74-1895-4673-9834-96675448ee76 for instance with vm_state building and task_state spawning. 
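The entries above all follow the same vCenter interaction pattern: the driver invokes an asynchronous task (PowerOffVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task), then waits for it by polling its state until it reports "completed successfully". The sketch below is a minimal, self-contained illustration of that invoke-then-poll loop, not oslo.vmware's implementation; the fetch_task_state helper, the simulated state sequence, and the task id are assumptions made only so the example runs.

```python
import itertools
import time

# Stand-in for a vCenter task-state query (a PropertyCollector call in the
# real driver); yields "running" twice, then "success", so the sketch runs.
_states = itertools.chain(["running", "running"], itertools.repeat("success"))

def fetch_task_state(task_id):
    return next(_states)

def wait_for_task(task_id, poll_interval=0.5, timeout=60.0):
    """Poll an asynchronous task until it finishes, mirroring the
    'progress is 0% ... completed successfully' lines in the log."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state = fetch_task_state(task_id)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"task {task_id} failed")
        time.sleep(poll_interval)  # back off between polls
    raise TimeoutError(f"task {task_id} did not complete within {timeout}s")

wait_for_task("task-0000000")  # placeholder task id, not taken from the log
```

In the real driver the polling interval, retry behaviour, and error translation are handled by the session object; the point here is only the shape of the loop that produces the repeated progress/completion entries.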
[ 1497.433830] env[62816]: DEBUG nova.compute.utils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1497.435912] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1497.436109] env[62816]: DEBUG nova.network.neutron [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1497.459264] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1497.459485] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1497.459664] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Deleting the datastore file [datastore1] c6dc008c-6336-4271-9635-a7e0652138e0 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1497.463129] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5fd3db0-d736-4b93-8b6a-fcbce6fdda59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.465161] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788221, 'name': CloneVM_Task, 'duration_secs': 1.544009} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.465928] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Created linked-clone VM from snapshot [ 1497.467144] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cb18e4-0d5a-4069-a398-a7253cb5469b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.473467] env[62816]: DEBUG oslo_vmware.api [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for the task: (returnval){ [ 1497.473467] env[62816]: value = "task-1788229" [ 1497.473467] env[62816]: _type = "Task" [ 1497.473467] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.481399] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Uploading image c5ecd3ac-4578-4849-be1d-6f8ad2bc4a2b {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1497.492831] env[62816]: DEBUG oslo_vmware.api [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788229, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.526420] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1497.526420] env[62816]: value = "vm-371009" [ 1497.526420] env[62816]: _type = "VirtualMachine" [ 1497.526420] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1497.526714] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-3fdf071b-badf-4add-b6b3-00befc48c9ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.535390] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e1a066-fa79-202f-e429-ee013c434e71, 'name': SearchDatastore_Task, 'duration_secs': 0.009335} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.537129] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dfa527b-87f8-43e7-b02c-62913a7037d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.542878] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lease: (returnval){ [ 1497.542878] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528b9e9f-6201-1558-a2b3-22418d876592" [ 1497.542878] env[62816]: _type = "HttpNfcLease" [ 1497.542878] env[62816]: } obtained for exporting VM: (result){ [ 1497.542878] env[62816]: value = "vm-371009" [ 1497.542878] env[62816]: _type = "VirtualMachine" [ 1497.542878] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1497.543133] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the lease: (returnval){ [ 1497.543133] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528b9e9f-6201-1558-a2b3-22418d876592" [ 1497.543133] env[62816]: _type = "HttpNfcLease" [ 1497.543133] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1497.553500] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1497.553500] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52965999-f419-477e-ab75-5f6ef4a71e83" [ 1497.553500] env[62816]: _type = "Task" [ 1497.553500] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.557718] env[62816]: DEBUG nova.network.neutron [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Successfully updated port: c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1497.562359] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1497.562359] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528b9e9f-6201-1558-a2b3-22418d876592" [ 1497.562359] env[62816]: _type = "HttpNfcLease" [ 1497.562359] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1497.562359] env[62816]: DEBUG nova.policy [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cb65109f88440fdba4a763bda9c3c5e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c6541e22d1349eb9818ec4c59270c5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1497.570033] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52965999-f419-477e-ab75-5f6ef4a71e83, 'name': SearchDatastore_Task, 'duration_secs': 0.013262} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.574738] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.575017] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d16a99df-f092-4d56-9730-852883bbdb70/d16a99df-f092-4d56-9730-852883bbdb70.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1497.575896] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef319f07-f769-4373-84d5-904e8a68253f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.584568] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1497.584568] env[62816]: value = "task-1788231" [ 1497.584568] env[62816]: _type = "Task" [ 1497.584568] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.596974] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788231, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.627963] env[62816]: DEBUG oslo_vmware.api [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788226, 'name': PowerOffVM_Task, 'duration_secs': 0.246693} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.628400] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1497.629614] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1497.629614] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eca77a6d-0ed2-4db7-a2c8-fb6e5bccd26c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.725367] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "a01e772c-dafe-4091-bae6-f9f59d5c972d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.725756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.815021] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788227, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076069} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.815586] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1497.816518] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4788f6b8-cd27-46ce-9c46-e58c1e9d0119 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.844459] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] b409568f-6e04-4218-8a7b-1bbf785115c3/b409568f-6e04-4218-8a7b-1bbf785115c3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.848019] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b27760e-a3f0-47e3-a034-d17990fc6d84 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.868365] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1497.868365] env[62816]: value = "task-1788233" [ 1497.868365] env[62816]: _type = "Task" [ 1497.868365] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.878983] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788233, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.935720] env[62816]: DEBUG nova.network.neutron [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Successfully created port: e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1497.940851] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1497.983950] env[62816]: DEBUG oslo_vmware.api [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Task: {'id': task-1788229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305464} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.984243] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1497.984432] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1497.984606] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1497.984791] env[62816]: INFO nova.compute.manager [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1497.985041] env[62816]: DEBUG oslo.service.loopingcall [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1497.985240] env[62816]: DEBUG nova.compute.manager [-] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1497.985351] env[62816]: DEBUG nova.network.neutron [-] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1498.028968] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2396ff7d-3c36-45dc-b25c-7461ff8dd938 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.036928] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29826cf3-d317-4ec3-a692-a6740d81d4ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.075560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "refresh_cache-0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1498.075764] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired lock "refresh_cache-0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.075960] env[62816]: DEBUG nova.network.neutron [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1498.084618] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa03c22f-4737-4e11-86a6-aab115ecbd71 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.095450] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1498.095450] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528b9e9f-6201-1558-a2b3-22418d876592" [ 1498.095450] env[62816]: _type = "HttpNfcLease" [ 1498.095450] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1498.098422] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1498.098422] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528b9e9f-6201-1558-a2b3-22418d876592" [ 1498.098422] env[62816]: _type = "HttpNfcLease" [ 1498.098422] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1498.099683] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d0c4db-5e4f-46f3-a7b3-b7c17dcbb904 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.104129] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c1bef8-4850-4db9-b1e3-55a40995af99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.113436] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788231, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.120186] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c279a7-4ff5-bb26-27bc-1fa74d792a2c/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1498.124066] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c279a7-4ff5-bb26-27bc-1fa74d792a2c/disk-0.vmdk for reading. 
{{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1498.131567] env[62816]: DEBUG nova.compute.provider_tree [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.135401] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.135720] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.135945] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Deleting the datastore file [datastore1] f6ddaab3-d420-4ee4-bf75-486228826635 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1498.193395] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-794d2f8f-3dc1-4c31-a98d-a597db29376b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.205955] env[62816]: DEBUG oslo_vmware.api [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1498.205955] env[62816]: value = "task-1788234" [ 1498.205955] env[62816]: _type = "Task" [ 1498.205955] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.216996] env[62816]: DEBUG oslo_vmware.api [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788234, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.266744] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-307166e4-6828-4fdc-abca-1e8f8986ad18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.381476] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788233, 'name': ReconfigVM_Task, 'duration_secs': 0.424896} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.382601] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Reconfigured VM instance instance-00000022 to attach disk [datastore1] b409568f-6e04-4218-8a7b-1bbf785115c3/b409568f-6e04-4218-8a7b-1bbf785115c3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1498.382975] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e6b72a5-4d24-4f7e-beb8-e8e660cc85f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.390708] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1498.390708] env[62816]: value = "task-1788235" [ 1498.390708] env[62816]: _type = "Task" [ 1498.390708] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.399395] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788235, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.565823] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1498.566114] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371007', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'name': 'volume-f19290f7-d256-4b28-a0db-14a599c23011', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b10aca0-950b-46f6-8367-5cb9ea7540c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'serial': 'f19290f7-d256-4b28-a0db-14a599c23011'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1498.567204] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d300bc60-6fd3-4a94-a599-bbd2de73a1a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.584056] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f81a08d-15cb-4aa1-917a-996c36a109d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.613327] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] volume-f19290f7-d256-4b28-a0db-14a599c23011/volume-f19290f7-d256-4b28-a0db-14a599c23011.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1498.617409] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c0169ef-1f59-4528-8dfd-8339158fca85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.636368] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788231, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699106} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.638327] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d16a99df-f092-4d56-9730-852883bbdb70/d16a99df-f092-4d56-9730-852883bbdb70.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1498.638548] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1498.638951] env[62816]: DEBUG oslo_vmware.api [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Waiting for the task: (returnval){ [ 1498.638951] env[62816]: value = "task-1788236" [ 1498.638951] env[62816]: _type = "Task" [ 1498.638951] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.639244] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1697b2f4-e61f-4052-9365-77f3b7619ce7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.649994] env[62816]: DEBUG oslo_vmware.api [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788236, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.650816] env[62816]: DEBUG nova.network.neutron [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1498.654481] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1498.654481] env[62816]: value = "task-1788237" [ 1498.654481] env[62816]: _type = "Task" [ 1498.654481] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.665332] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788237, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.694029] env[62816]: DEBUG nova.scheduler.client.report [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1498.717134] env[62816]: DEBUG oslo_vmware.api [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.401072} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.718168] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1498.718383] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1498.718643] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1498.718852] env[62816]: INFO nova.compute.manager [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1498.719235] env[62816]: DEBUG oslo.service.loopingcall [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.719863] env[62816]: DEBUG nova.compute.manager [-] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1498.719968] env[62816]: DEBUG nova.network.neutron [-] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1498.886436] env[62816]: DEBUG nova.network.neutron [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Updating instance_info_cache with network_info: [{"id": "c9c1cb74-1895-4673-9834-96675448ee76", "address": "fa:16:3e:f2:2e:a4", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9c1cb74-18", "ovs_interfaceid": "c9c1cb74-1895-4673-9834-96675448ee76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.906354] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788235, 'name': Rename_Task, 'duration_secs': 0.271505} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.908098] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1498.908098] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e95cec7d-6b76-49c0-84e4-21ace5e22323 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.916077] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1498.916077] env[62816]: value = "task-1788238" [ 1498.916077] env[62816]: _type = "Task" [ 1498.916077] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.924510] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788238, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.953977] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1498.974683] env[62816]: DEBUG nova.network.neutron [-] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.985255] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1498.985543] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1498.986179] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1498.986561] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1498.986783] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1498.987178] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 
tempest-ImagesNegativeTestJSON-1162552027-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1498.987485] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1498.987717] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1498.987904] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1498.991019] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1498.991019] env[62816]: DEBUG nova.virt.hardware [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1498.991019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f34ea9-b37d-4c27-baf5-61304000019f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.999323] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e0529d-e8e2-4ad9-9ac9-f2e03dabfdfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.154138] env[62816]: DEBUG oslo_vmware.api [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788236, 'name': ReconfigVM_Task, 'duration_secs': 0.47326} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.154138] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Reconfigured VM instance instance-00000008 to attach disk [datastore1] volume-f19290f7-d256-4b28-a0db-14a599c23011/volume-f19290f7-d256-4b28-a0db-14a599c23011.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.158965] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65248012-cee8-4b6d-97a4-43631de84207 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.181539] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788237, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069148} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.183016] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1499.186532] env[62816]: DEBUG oslo_vmware.api [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Waiting for the task: (returnval){ [ 1499.186532] env[62816]: value = "task-1788239" [ 1499.186532] env[62816]: _type = "Task" [ 1499.186532] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.187449] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d759391-72c2-4a72-9e69-6711cf9dda5b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.207199] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.286s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.217691] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] d16a99df-f092-4d56-9730-852883bbdb70/d16a99df-f092-4d56-9730-852883bbdb70.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1499.223248] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.661s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.223248] env[62816]: DEBUG nova.objects.instance [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lazy-loading 'resources' on Instance uuid a6b06048-6cdc-497e-8c5d-b6a26d3e7557 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1499.223402] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a903c629-33f3-482d-9362-8418b412e03b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.238416] env[62816]: DEBUG oslo_vmware.api [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788239, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.246468] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1499.246468] env[62816]: value = "task-1788240" [ 1499.246468] env[62816]: _type = "Task" [ 1499.246468] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.259299] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788240, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.260921] env[62816]: INFO nova.scheduler.client.report [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Deleted allocations for instance 128bd207-a483-4b38-9fd4-4fb996ce1d0d [ 1499.389441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Releasing lock "refresh_cache-0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.390469] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Instance network_info: |[{"id": "c9c1cb74-1895-4673-9834-96675448ee76", "address": "fa:16:3e:f2:2e:a4", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9c1cb74-18", "ovs_interfaceid": "c9c1cb74-1895-4673-9834-96675448ee76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1499.391096] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:2e:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9c1cb74-1895-4673-9834-96675448ee76', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1499.401159] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Creating folder: Project (6f4b3d1d951945a7a7f808588e3c7c93). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1499.402250] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-607552c7-2c06-4b9e-bf29-c374cb6b3201 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.414853] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Created folder: Project (6f4b3d1d951945a7a7f808588e3c7c93) in parent group-v370905. [ 1499.415236] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Creating folder: Instances. Parent ref: group-v371011. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1499.416327] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c4d3b82-f2a6-489c-b88d-9016e79a64d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.430380] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788238, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.433944] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Created folder: Instances in parent group-v371011. [ 1499.433944] env[62816]: DEBUG oslo.service.loopingcall [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.433944] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1499.433944] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-621cc963-06c1-4c6f-a854-b3af86f353a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.452568] env[62816]: DEBUG nova.compute.manager [req-283dabec-aa56-4e83-8e40-b54e434d7849 req-d0a2f066-768d-4bc7-8354-6f3692929280 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Received event network-vif-deleted-de736438-152f-4337-ae73-74024c1cac15 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.453255] env[62816]: INFO nova.compute.manager [req-283dabec-aa56-4e83-8e40-b54e434d7849 req-d0a2f066-768d-4bc7-8354-6f3692929280 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Neutron deleted interface de736438-152f-4337-ae73-74024c1cac15; detaching it from the instance and deleting it from the info cache [ 1499.453255] env[62816]: DEBUG nova.network.neutron [req-283dabec-aa56-4e83-8e40-b54e434d7849 req-d0a2f066-768d-4bc7-8354-6f3692929280 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.460386] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1499.460386] env[62816]: value = "task-1788243" [ 1499.460386] env[62816]: _type = "Task" [ 1499.460386] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.470319] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788243, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.479618] env[62816]: INFO nova.compute.manager [-] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Took 1.49 seconds to deallocate network for instance. [ 1499.494243] env[62816]: DEBUG nova.compute.manager [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Received event network-changed-c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.494391] env[62816]: DEBUG nova.compute.manager [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Refreshing instance network info cache due to event network-changed-c9c1cb74-1895-4673-9834-96675448ee76. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1499.494617] env[62816]: DEBUG oslo_concurrency.lockutils [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] Acquiring lock "refresh_cache-0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.494766] env[62816]: DEBUG oslo_concurrency.lockutils [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] Acquired lock "refresh_cache-0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.494963] env[62816]: DEBUG nova.network.neutron [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Refreshing network info cache for port c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1499.706037] env[62816]: DEBUG oslo_vmware.api [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788239, 'name': ReconfigVM_Task, 'duration_secs': 0.348684} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.706037] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371007', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'name': 'volume-f19290f7-d256-4b28-a0db-14a599c23011', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b10aca0-950b-46f6-8367-5cb9ea7540c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'serial': 'f19290f7-d256-4b28-a0db-14a599c23011'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1499.762557] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788240, 'name': ReconfigVM_Task, 'duration_secs': 0.297869} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.763210] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Reconfigured VM instance instance-00000023 to attach disk [datastore1] d16a99df-f092-4d56-9730-852883bbdb70/d16a99df-f092-4d56-9730-852883bbdb70.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1499.765415] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-224b7b66-af78-484f-b7f5-bf9041bd02a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.777929] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1499.777929] env[62816]: value = "task-1788244" [ 1499.777929] env[62816]: _type = "Task" [ 1499.777929] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.778935] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7bfbc98-7d47-4551-80c3-1137a88f4445 tempest-ServersAaction247Test-1443363167 tempest-ServersAaction247Test-1443363167-project-member] Lock "128bd207-a483-4b38-9fd4-4fb996ce1d0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.968s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.795875] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788244, 'name': Rename_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.893263] env[62816]: DEBUG nova.network.neutron [-] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.925578] env[62816]: DEBUG nova.compute.manager [req-e1851367-c5fc-4b40-b4f5-47b6e883726e req-29d78cfe-2079-4e97-9b7f-592e820c317b service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Received event network-vif-plugged-e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1499.926101] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1851367-c5fc-4b40-b4f5-47b6e883726e req-29d78cfe-2079-4e97-9b7f-592e820c317b service nova] Acquiring lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.927949] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1851367-c5fc-4b40-b4f5-47b6e883726e req-29d78cfe-2079-4e97-9b7f-592e820c317b service nova] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.927949] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1851367-c5fc-4b40-b4f5-47b6e883726e req-29d78cfe-2079-4e97-9b7f-592e820c317b service nova] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.927949] env[62816]: DEBUG nova.compute.manager [req-e1851367-c5fc-4b40-b4f5-47b6e883726e req-29d78cfe-2079-4e97-9b7f-592e820c317b service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] No waiting events found dispatching network-vif-plugged-e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1499.927949] env[62816]: WARNING nova.compute.manager [req-e1851367-c5fc-4b40-b4f5-47b6e883726e req-29d78cfe-2079-4e97-9b7f-592e820c317b service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Received unexpected event network-vif-plugged-e5ca1bb3-2557-4130-9f1b-0efec87f923a for instance with vm_state building and task_state spawning. [ 1499.936210] env[62816]: DEBUG oslo_vmware.api [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788238, 'name': PowerOnVM_Task, 'duration_secs': 0.735959} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.937396] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1499.937881] env[62816]: INFO nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Took 8.95 seconds to spawn the instance on the hypervisor. [ 1499.938193] env[62816]: DEBUG nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1499.939312] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cae855a-9abd-4e2f-adca-0f5418ab339b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.959161] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d97a455c-d66b-4fcd-9c17-a2fa92215121 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.971891] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788243, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.978433] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1526809a-b0ee-43ea-9c32-9bf4efb2d4ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.993546] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.024904] env[62816]: DEBUG nova.compute.manager [req-283dabec-aa56-4e83-8e40-b54e434d7849 req-d0a2f066-768d-4bc7-8354-6f3692929280 service nova] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Detach interface failed, port_id=de736438-152f-4337-ae73-74024c1cac15, reason: Instance f6ddaab3-d420-4ee4-bf75-486228826635 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1500.297918] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788244, 'name': Rename_Task, 'duration_secs': 0.150685} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.297918] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1500.297918] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58c97bdd-1cf7-46ac-9742-ffbcc23c0103 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.305845] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1500.305845] env[62816]: value = "task-1788245" [ 1500.305845] env[62816]: _type = "Task" [ 1500.305845] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.319301] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788245, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.331334] env[62816]: DEBUG nova.network.neutron [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Updated VIF entry in instance network info cache for port c9c1cb74-1895-4673-9834-96675448ee76. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1500.331880] env[62816]: DEBUG nova.network.neutron [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Updating instance_info_cache with network_info: [{"id": "c9c1cb74-1895-4673-9834-96675448ee76", "address": "fa:16:3e:f2:2e:a4", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.79", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9c1cb74-18", "ovs_interfaceid": "c9c1cb74-1895-4673-9834-96675448ee76", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.397961] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54392127-ae90-4ef4-9c36-c80fa159b6ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.399228] env[62816]: INFO nova.compute.manager [-] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Took 1.68 seconds to deallocate network for instance. [ 1500.407819] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d07372e-3ab2-40fb-9a32-3d34388b18ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.439846] env[62816]: DEBUG nova.network.neutron [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Successfully updated port: e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1500.441759] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54beb87f-cda3-46c8-84d2-db387e1777ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.450787] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1b5e9a-7046-4bd8-b41a-8c12b7682416 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.466885] env[62816]: INFO nova.compute.manager [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Took 49.39 seconds to build instance. 
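The records above repeatedly show the driver waiting on vCenter tasks ("Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" followed by "completed successfully"). As a rough orientation, the sketch below approximates that polling pattern; it is illustrative only, not the oslo.vmware implementation, and `get_task_info` is a hypothetical callable standing in for the PropertyCollector lookup the real driver performs.

```python
# Illustrative sketch only -- not oslo.vmware code. It mirrors the behaviour
# visible in the log: poll a vCenter task until it reports success, surfacing
# the intermediate "progress is N%" states along the way.
import time


def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it finishes.

    get_task_info is assumed (for this sketch) to return an object with
    .state ('running', 'success' or 'error'), .progress (0-100) and
    .error_msg attributes.
    """
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info                    # "completed successfully" in the log
        if info.state == 'error':
            raise RuntimeError(info.error_msg)
        # corresponds to the "... progress is N%" debug records
        print("progress is %s%%" % info.progress)
        time.sleep(poll_interval)
```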
[ 1500.483810] env[62816]: DEBUG nova.compute.provider_tree [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1500.485638] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e2569cdb-d473-421d-bec4-ad3cfb2e6f64 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.291s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.492420] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788243, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.771094] env[62816]: DEBUG nova.objects.instance [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lazy-loading 'flavor' on Instance uuid 0b10aca0-950b-46f6-8367-5cb9ea7540c8 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1500.817073] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788245, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.836152] env[62816]: DEBUG oslo_concurrency.lockutils [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] Releasing lock "refresh_cache-0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.836431] env[62816]: DEBUG nova.compute.manager [req-29e31558-1f7f-46bd-937a-2565298f9845 req-0513fda1-0d88-4566-835b-068f3cfde4de service nova] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Received event network-vif-deleted-3d72b4f0-d5b8-433f-8f1a-0813299ad226 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1500.908037] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.946997] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "refresh_cache-afd02433-0912-44ef-8e0e-71d6ee8fbb41" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.946997] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquired lock 
"refresh_cache-afd02433-0912-44ef-8e0e-71d6ee8fbb41" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.946997] env[62816]: DEBUG nova.network.neutron [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1500.976047] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788243, 'name': CreateVM_Task, 'duration_secs': 1.464001} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.976336] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1500.976991] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1500.977230] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1500.977946] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1500.978290] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f797eb-27cc-460a-84d5-f335615dd7f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.983322] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1500.983322] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52be1993-cd4e-b39b-9f6f-5b5fc6738110" [ 1500.983322] env[62816]: _type = "Task" [ 1500.983322] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.988006] env[62816]: DEBUG nova.scheduler.client.report [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1500.994612] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1501.000938] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52be1993-cd4e-b39b-9f6f-5b5fc6738110, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.276051] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee4dc0d7-5d2c-4070-aaff-014943fff64c tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.319s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.317195] env[62816]: DEBUG oslo_vmware.api [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788245, 'name': PowerOnVM_Task, 'duration_secs': 0.558505} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.317195] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1501.317195] env[62816]: INFO nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Took 7.64 seconds to spawn the instance on the hypervisor. 
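The "Lock "compute_resources" acquired ... waited Ns" / ""released" ... held Ns" records in this section come from oslo.concurrency's lock helper. The sketch below shows the generic `lockutils.synchronized` usage that produces such records; the `update_usage` body is a placeholder for illustration and is not Nova's ResourceTracker code.

```python
# Minimal sketch of the oslo.concurrency pattern behind the
# "Lock 'compute_resources' acquired/released" debug lines. The decorator is
# real oslo.concurrency API; the function body is a stand-in.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(instance):
    # Runs with the in-process 'compute_resources' lock held; lockutils logs
    # the "acquired ... waited Ns" and "released ... held Ns" lines around it.
    pass


update_usage({'uuid': 'example-uuid'})
```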
[ 1501.317402] env[62816]: DEBUG nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1501.318074] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e90deca-b41d-4b35-9b55-073d77f608fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.494770] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.273s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.500969] env[62816]: DEBUG nova.network.neutron [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1501.503190] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.628s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.504751] env[62816]: INFO nova.compute.claims [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1501.511027] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52be1993-cd4e-b39b-9f6f-5b5fc6738110, 'name': SearchDatastore_Task, 'duration_secs': 0.015609} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.511027] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1501.511027] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1501.511027] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.511027] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1501.511027] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1501.513223] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-253d4955-d9af-47ab-9ab8-7ea8c1b430ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.523373] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1501.523667] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1501.524756] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30a74c36-92d9-42bd-9d04-be01910b4051 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.529511] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.530367] env[62816]: INFO nova.scheduler.client.report [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleted allocations for instance a6b06048-6cdc-497e-8c5d-b6a26d3e7557 [ 1501.538230] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1501.538230] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ed1f2a-6502-db9e-92fa-8d9f8547f4f6" [ 1501.538230] env[62816]: _type = "Task" [ 1501.538230] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.553015] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ed1f2a-6502-db9e-92fa-8d9f8547f4f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.627555] env[62816]: DEBUG nova.compute.manager [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Received event network-changed-e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1501.627761] env[62816]: DEBUG nova.compute.manager [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Refreshing instance network info cache due to event network-changed-e5ca1bb3-2557-4130-9f1b-0efec87f923a. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1501.627993] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] Acquiring lock "refresh_cache-afd02433-0912-44ef-8e0e-71d6ee8fbb41" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1501.694570] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.694875] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.712114] env[62816]: DEBUG nova.network.neutron [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Updating instance_info_cache with network_info: [{"id": "e5ca1bb3-2557-4130-9f1b-0efec87f923a", "address": "fa:16:3e:20:f1:8b", "network": {"id": "9ac9532f-4b83-4fbc-976d-1c992e316103", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1389306239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6541e22d1349eb9818ec4c59270c5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5ca1bb3-25", "ovs_interfaceid": "e5ca1bb3-2557-4130-9f1b-0efec87f923a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1501.762319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "b409568f-6e04-4218-8a7b-1bbf785115c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.762585] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 
tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.762795] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "b409568f-6e04-4218-8a7b-1bbf785115c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.762979] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.763194] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1501.766335] env[62816]: INFO nova.compute.manager [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Terminating instance [ 1501.768128] env[62816]: DEBUG nova.compute.manager [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1501.768327] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1501.769279] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f789487-b84a-4b31-9b11-f1ddeaef7125 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.776793] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1501.777041] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05af6395-39f0-43f3-904e-917319f7f0bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.784291] env[62816]: DEBUG oslo_vmware.api [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1501.784291] env[62816]: value = "task-1788246" [ 1501.784291] env[62816]: _type = "Task" [ 1501.784291] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.799096] env[62816]: DEBUG oslo_vmware.api [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.836460] env[62816]: INFO nova.compute.manager [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Took 48.06 seconds to build instance. [ 1502.022709] env[62816]: DEBUG nova.compute.manager [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Received event network-changed-64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.022709] env[62816]: DEBUG nova.compute.manager [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Refreshing instance network info cache due to event network-changed-64790bf9-4e84-424e-a85d-819c0d6cade8. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1502.023079] env[62816]: DEBUG oslo_concurrency.lockutils [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] Acquiring lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1502.023079] env[62816]: DEBUG oslo_concurrency.lockutils [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] Acquired lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.023079] env[62816]: DEBUG nova.network.neutron [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Refreshing network info cache for port 64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.044296] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9415b2a1-ba09-4ab6-8b7c-a21d183fac24 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "a6b06048-6cdc-497e-8c5d-b6a26d3e7557" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.427s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.055185] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ed1f2a-6502-db9e-92fa-8d9f8547f4f6, 'name': SearchDatastore_Task, 'duration_secs': 0.039162} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.056036] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e970a43a-655a-4130-a7ad-64318ac56e48 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.062764] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1502.062764] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525a5c4e-36e7-5afe-2118-33c13efbd254" [ 1502.062764] env[62816]: _type = "Task" [ 1502.062764] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.071609] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525a5c4e-36e7-5afe-2118-33c13efbd254, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.199609] env[62816]: INFO nova.compute.manager [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Detaching volume f19290f7-d256-4b28-a0db-14a599c23011 [ 1502.215251] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Releasing lock "refresh_cache-afd02433-0912-44ef-8e0e-71d6ee8fbb41" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1502.215707] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Instance network_info: |[{"id": "e5ca1bb3-2557-4130-9f1b-0efec87f923a", "address": "fa:16:3e:20:f1:8b", "network": {"id": "9ac9532f-4b83-4fbc-976d-1c992e316103", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1389306239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6541e22d1349eb9818ec4c59270c5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5ca1bb3-25", "ovs_interfaceid": "e5ca1bb3-2557-4130-9f1b-0efec87f923a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1502.216446] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] Acquired lock "refresh_cache-afd02433-0912-44ef-8e0e-71d6ee8fbb41" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1502.216679] env[62816]: DEBUG nova.network.neutron [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Refreshing network info cache for port e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1502.218311] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:f1:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e5ca1bb3-2557-4130-9f1b-0efec87f923a', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1502.226508] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Creating folder: Project (2c6541e22d1349eb9818ec4c59270c5d). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1502.230132] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e65964b2-51a9-4428-9f7b-14ad8f341669 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.239303] env[62816]: INFO nova.virt.block_device [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Attempting to driver detach volume f19290f7-d256-4b28-a0db-14a599c23011 from mountpoint /dev/sdb [ 1502.239611] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1502.239816] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371007', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'name': 'volume-f19290f7-d256-4b28-a0db-14a599c23011', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b10aca0-950b-46f6-8367-5cb9ea7540c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'serial': 'f19290f7-d256-4b28-a0db-14a599c23011'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1502.241107] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ca0a8a-c1e1-468d-9b8f-1d4085c6085f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.245389] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Created folder: Project (2c6541e22d1349eb9818ec4c59270c5d) in parent group-v370905. [ 1502.245629] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Creating folder: Instances. Parent ref: group-v371014. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1502.246379] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-85496263-8d79-428d-b804-4c944db256be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.272382] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67437162-194d-4d6d-8ab1-7e59da3b73e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.274972] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Created folder: Instances in parent group-v371014. [ 1502.275273] env[62816]: DEBUG oslo.service.loopingcall [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1502.275503] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1502.276349] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d65399d2-c901-467f-b4b7-73eec45f2679 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.297591] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d15f2c-c857-4259-bf93-db1c34480134 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.301742] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1502.301742] env[62816]: value = "task-1788249" [ 1502.301742] env[62816]: _type = "Task" [ 1502.301742] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.323443] env[62816]: DEBUG oslo_vmware.api [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788246, 'name': PowerOffVM_Task, 'duration_secs': 0.252208} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.326817] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b6f8f2-c608-45fa-ba28-03eae708a479 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.329750] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1502.329937] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1502.333100] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ca8b23f-5a39-45a5-8b5f-0361e8e82a6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.335047] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788249, 'name': CreateVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.352172] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b9ebbf46-67c4-4b09-beb9-6c6287f6cbfe tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.985s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1502.352484] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] The volume has not been displaced from its original location: [datastore1] volume-f19290f7-d256-4b28-a0db-14a599c23011/volume-f19290f7-d256-4b28-a0db-14a599c23011.vmdk. No consolidation needed. {{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1502.360705] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Reconfiguring VM instance instance-00000008 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1502.369642] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5594ef66-8bf1-47b5-8b3f-4a33c5e0576b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.386104] env[62816]: DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1502.398671] env[62816]: DEBUG oslo_vmware.api [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Waiting for the task: (returnval){ [ 1502.398671] env[62816]: value = "task-1788251" [ 1502.398671] env[62816]: _type = "Task" [ 1502.398671] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.155165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "d16a99df-f092-4d56-9730-852883bbdb70" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.155165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.155548] env[62816]: INFO nova.compute.manager [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Rebooting instance [ 1503.156957] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "cf6ff174-1324-42bd-a77a-905b9a333c27" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.157094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.157292] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "cf6ff174-1324-42bd-a77a-905b9a333c27-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.157469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1503.157629] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1503.160679] env[62816]: DEBUG oslo_vmware.api [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788251, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.160872] env[62816]: WARNING oslo_vmware.common.loopingcall [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] task run outlasted interval by 0.26196299999999995 sec [ 1503.165058] env[62816]: INFO nova.compute.manager [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Terminating instance [ 1503.175148] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1503.175391] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.175610] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleting the datastore file [datastore1] b409568f-6e04-4218-8a7b-1bbf785115c3 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1503.176350] env[62816]: DEBUG nova.compute.manager [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1503.176641] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1503.181670] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8ec32d4-50c5-4fff-9165-13f66c85c2de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.182879] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161196ad-84ea-4b5b-ba64-76dd43675140 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.198467] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1503.199053] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788249, 'name': CreateVM_Task, 'duration_secs': 0.439291} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.199289] env[62816]: DEBUG oslo_vmware.api [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788251, 'name': ReconfigVM_Task, 'duration_secs': 0.230317} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.199493] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525a5c4e-36e7-5afe-2118-33c13efbd254, 'name': SearchDatastore_Task, 'duration_secs': 0.032406} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.204412] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1503.204689] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Reconfigured VM instance instance-00000008 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1503.209233] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.209501] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf/0707fdd6-2aed-4a09-90e0-c7fb0eae6acf.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1503.209779] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1503.210378] env[62816]: DEBUG oslo_vmware.api [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1503.210378] env[62816]: value = "task-1788252" [ 1503.210378] env[62816]: _type = "Task" [ 1503.210378] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.211182] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.211366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.212521] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1503.212521] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fb424bb-9b40-4fe1-91c3-b466935f23d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.221839] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeade81d-49e3-4a42-bee3-1e901a0b0858 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.224309] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-399ef732-8d3c-47aa-bc60-fcb4e02df055 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.229740] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45558b6f-cddf-40d9-b6a8-b9a64c59b346 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.246531] env[62816]: DEBUG oslo_vmware.api [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Waiting for the task: (returnval){ [ 1503.246531] env[62816]: value = "task-1788255" [ 1503.246531] env[62816]: _type = "Task" [ 1503.246531] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.246762] env[62816]: DEBUG oslo_vmware.api [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788252, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.247024] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1503.247024] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c530be-c292-6952-81ff-cd2c06eb8b2f" [ 1503.247024] env[62816]: _type = "Task" [ 1503.247024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.252047] env[62816]: DEBUG oslo_vmware.api [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1503.252047] env[62816]: value = "task-1788254" [ 1503.252047] env[62816]: _type = "Task" [ 1503.252047] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.252590] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1503.252590] env[62816]: value = "task-1788253" [ 1503.252590] env[62816]: _type = "Task" [ 1503.252590] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.270544] env[62816]: DEBUG oslo_vmware.api [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788255, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.281133] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c530be-c292-6952-81ff-cd2c06eb8b2f, 'name': SearchDatastore_Task, 'duration_secs': 0.023835} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.281478] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788253, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.281698] env[62816]: DEBUG oslo_vmware.api [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788254, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.284924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.285259] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1503.285531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.285686] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1503.285861] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1503.286693] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1ea6ec3-a85d-45a6-a325-262e50b8ce13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.295833] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1503.296031] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1503.296802] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3846b0c2-0ea1-4e61-b2f1-64c22ddf2d8d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.309771] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1503.309771] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52855b8e-1f5e-3911-910d-486d19ee3c50" [ 1503.309771] env[62816]: _type = "Task" [ 1503.309771] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.318902] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52855b8e-1f5e-3911-910d-486d19ee3c50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.392339] env[62816]: DEBUG nova.network.neutron [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Updated VIF entry in instance network info cache for port e5ca1bb3-2557-4130-9f1b-0efec87f923a. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1503.392617] env[62816]: DEBUG nova.network.neutron [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Updating instance_info_cache with network_info: [{"id": "e5ca1bb3-2557-4130-9f1b-0efec87f923a", "address": "fa:16:3e:20:f1:8b", "network": {"id": "9ac9532f-4b83-4fbc-976d-1c992e316103", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1389306239-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6541e22d1349eb9818ec4c59270c5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5ca1bb3-25", "ovs_interfaceid": "e5ca1bb3-2557-4130-9f1b-0efec87f923a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.499553] env[62816]: DEBUG nova.network.neutron [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updated VIF entry in instance network info cache for port 64790bf9-4e84-424e-a85d-819c0d6cade8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1503.499931] env[62816]: DEBUG nova.network.neutron [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [{"id": "64790bf9-4e84-424e-a85d-819c0d6cade8", "address": "fa:16:3e:51:d0:5a", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64790bf9-4e", "ovs_interfaceid": "64790bf9-4e84-424e-a85d-819c0d6cade8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1503.689908] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1503.738696] env[62816]: DEBUG oslo_vmware.api [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788252, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.275736} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.740362] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1503.740699] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1503.741018] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1503.741555] env[62816]: INFO nova.compute.manager [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Took 1.97 seconds to destroy the instance on the hypervisor. [ 1503.741662] env[62816]: DEBUG oslo.service.loopingcall [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1503.742807] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b25db7d-a941-4963-bcb6-df8d4914abfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.747175] env[62816]: DEBUG nova.compute.manager [-] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1503.747341] env[62816]: DEBUG nova.network.neutron [-] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1503.758127] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a10cc2-eb4d-4520-988a-6c3986c0210b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.771176] env[62816]: DEBUG oslo_vmware.api [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Task: {'id': task-1788255, 'name': ReconfigVM_Task, 'duration_secs': 0.167462} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.797915] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371007', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'name': 'volume-f19290f7-d256-4b28-a0db-14a599c23011', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0b10aca0-950b-46f6-8367-5cb9ea7540c8', 'attached_at': '', 'detached_at': '', 'volume_id': 'f19290f7-d256-4b28-a0db-14a599c23011', 'serial': 'f19290f7-d256-4b28-a0db-14a599c23011'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1503.809583] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e33187-e3e0-4ea6-9099-3f7f7406740e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.814956] env[62816]: DEBUG oslo_vmware.api [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788254, 'name': PowerOffVM_Task, 'duration_secs': 0.225545} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.814956] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788253, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.814956] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1503.814956] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1503.822973] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8960f830-9311-4eb3-bffe-e18aff87cb91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.828645] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f3f357-2a31-4074-9c71-7b707fb4bae8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.837424] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52855b8e-1f5e-3911-910d-486d19ee3c50, 'name': SearchDatastore_Task, 'duration_secs': 0.014139} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.838723] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-724800ac-4622-4f8a-935c-3673a9b03d14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.850173] env[62816]: DEBUG nova.compute.provider_tree [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1503.855993] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1503.855993] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ef4fe5-19bb-1779-dc5a-fa0fa2a2e51f" [ 1503.855993] env[62816]: _type = "Task" [ 1503.855993] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.867926] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ef4fe5-19bb-1779-dc5a-fa0fa2a2e51f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.895920] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ad97ae3-e5dc-464d-a53c-fe3176c937ad req-8cc2200d-f57c-4085-94ac-d8e5c5ab2217 service nova] Releasing lock "refresh_cache-afd02433-0912-44ef-8e0e-71d6ee8fbb41" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1503.908883] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1503.909286] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1503.909579] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleting the datastore file [datastore1] cf6ff174-1324-42bd-a77a-905b9a333c27 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1503.909924] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f4b3cfe8-6423-4435-8373-0b5f20b1c4cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.917975] env[62816]: DEBUG oslo_vmware.api [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1503.917975] env[62816]: value = "task-1788257" [ 1503.917975] env[62816]: _type = "Task" [ 1503.917975] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.928940] env[62816]: DEBUG oslo_vmware.api [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788257, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.004823] env[62816]: DEBUG oslo_concurrency.lockutils [req-f89a67db-79a5-40d9-ae4a-1f5971633fc2 req-c3dfdd6b-323a-4986-ad7d-5e60509339d4 service nova] Releasing lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.005581] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquired lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1504.005878] env[62816]: DEBUG nova.network.neutron [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1504.121432] env[62816]: DEBUG nova.compute.manager [req-860d5bf2-626d-42fa-90d5-d4d64052bdcc req-e08edecd-bc9d-4dee-a6ad-d2856d3fbc27 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Received event network-vif-deleted-6069e840-7095-4621-bf07-1d83bb93ce9d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1504.121592] env[62816]: INFO nova.compute.manager [req-860d5bf2-626d-42fa-90d5-d4d64052bdcc req-e08edecd-bc9d-4dee-a6ad-d2856d3fbc27 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Neutron deleted interface 6069e840-7095-4621-bf07-1d83bb93ce9d; detaching it from the instance and deleting it from the info cache [ 1504.121762] env[62816]: DEBUG nova.network.neutron [req-860d5bf2-626d-42fa-90d5-d4d64052bdcc req-e08edecd-bc9d-4dee-a6ad-d2856d3fbc27 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.272901] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.850726} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.273290] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf/0707fdd6-2aed-4a09-90e0-c7fb0eae6acf.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1504.273484] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1504.273747] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90f6bb19-8e32-4283-af63-854e53765443 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.281599] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1504.281599] env[62816]: value = "task-1788258" [ 1504.281599] env[62816]: _type = "Task" [ 1504.281599] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.289785] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788258, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.354046] env[62816]: DEBUG nova.scheduler.client.report [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1504.367749] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ef4fe5-19bb-1779-dc5a-fa0fa2a2e51f, 'name': SearchDatastore_Task, 'duration_secs': 0.120407} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.369110] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1504.369110] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] afd02433-0912-44ef-8e0e-71d6ee8fbb41/afd02433-0912-44ef-8e0e-71d6ee8fbb41.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1504.369110] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69eb3e18-23ae-43f7-af1d-90b299e21fea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.371736] env[62816]: DEBUG nova.objects.instance [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lazy-loading 'flavor' on Instance uuid 0b10aca0-950b-46f6-8367-5cb9ea7540c8 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.378983] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1504.378983] env[62816]: value = "task-1788259" [ 1504.378983] env[62816]: _type = "Task" [ 1504.378983] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.390022] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788259, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.429401] env[62816]: DEBUG oslo_vmware.api [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788257, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.4098} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.431023] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1504.431023] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1504.431023] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1504.431023] env[62816]: INFO nova.compute.manager [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1504.431023] env[62816]: DEBUG oslo.service.loopingcall [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1504.431023] env[62816]: DEBUG nova.compute.manager [-] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1504.431023] env[62816]: DEBUG nova.network.neutron [-] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1504.603149] env[62816]: DEBUG nova.network.neutron [-] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.625260] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b67949c-1c6b-4ff1-90ad-109abaf06ab7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.635080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdbde80-9205-4a18-b25d-d04c918f0d70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.674679] env[62816]: DEBUG nova.compute.manager [req-860d5bf2-626d-42fa-90d5-d4d64052bdcc req-e08edecd-bc9d-4dee-a6ad-d2856d3fbc27 service nova] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Detach interface failed, port_id=6069e840-7095-4621-bf07-1d83bb93ce9d, reason: Instance b409568f-6e04-4218-8a7b-1bbf785115c3 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1504.752865] env[62816]: DEBUG nova.network.neutron [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [{"id": "64790bf9-4e84-424e-a85d-819c0d6cade8", "address": "fa:16:3e:51:d0:5a", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64790bf9-4e", "ovs_interfaceid": "64790bf9-4e84-424e-a85d-819c0d6cade8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1504.793021] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788258, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.135476} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.793399] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1504.794384] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9ea51a9-7767-48fb-89ae-ddb77f65b8ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.818625] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf/0707fdd6-2aed-4a09-90e0-c7fb0eae6acf.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1504.819041] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-863f22e5-19e3-43fc-a68b-c21fa57ce6a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.840212] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1504.840212] env[62816]: value = "task-1788260" [ 1504.840212] env[62816]: _type = "Task" [ 1504.840212] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1504.849129] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788260, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.862294] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.359s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1504.862860] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1504.865730] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.324s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1504.866504] env[62816]: DEBUG nova.objects.instance [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lazy-loading 'resources' on Instance uuid 99bd7579-7097-41df-a8c0-e12a3863a3dc {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1504.893496] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788259, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.108990] env[62816]: INFO nova.compute.manager [-] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Took 1.36 seconds to deallocate network for instance. [ 1505.256663] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Releasing lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1505.259899] env[62816]: DEBUG nova.compute.manager [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1505.261263] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8577067e-ef20-44ce-90af-40c75eaaeca7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.266381] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "fb84cb48-d1a1-4eec-adb8-8edc585263df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.266711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.266992] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] 
Acquiring lock "fb84cb48-d1a1-4eec-adb8-8edc585263df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.267910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.267910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.277986] env[62816]: INFO nova.compute.manager [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Terminating instance [ 1505.281354] env[62816]: DEBUG nova.compute.manager [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1505.281660] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1505.282988] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d97fe03a-10bb-48ce-9222-088eddd6012d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.297103] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1505.300021] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a2e5ab4-1b41-4ccb-b3cf-1ab5e36e8beb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.306318] env[62816]: DEBUG oslo_vmware.api [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1505.306318] env[62816]: value = "task-1788261" [ 1505.306318] env[62816]: _type = "Task" [ 1505.306318] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.322047] env[62816]: DEBUG oslo_vmware.api [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.350846] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788260, 'name': ReconfigVM_Task, 'duration_secs': 0.497782} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.351191] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Reconfigured VM instance instance-00000024 to attach disk [datastore1] 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf/0707fdd6-2aed-4a09-90e0-c7fb0eae6acf.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1505.351923] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-78d68e3f-ad6f-41ae-920c-4063c92f78d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.358719] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1505.358719] env[62816]: value = "task-1788262" [ 1505.358719] env[62816]: _type = "Task" [ 1505.358719] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.367414] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788262, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.369790] env[62816]: DEBUG nova.compute.utils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1505.371290] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1505.371585] env[62816]: DEBUG nova.network.neutron [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1505.384998] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65230675-09fb-4b9a-bdf4-a34424ae239b tempest-VolumesAssistedSnapshotsTest-425214950 tempest-VolumesAssistedSnapshotsTest-425214950-project-admin] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.690s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.392478] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788259, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.442680] env[62816]: DEBUG nova.policy [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da3883d2c9f34a7282d7fda19aa1f4f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '830fc28618ac4a31856cca469d46a750', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1505.491831] env[62816]: DEBUG nova.network.neutron [-] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1505.618179] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.803725] env[62816]: DEBUG nova.network.neutron [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Successfully created port: 933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1505.820189] env[62816]: DEBUG oslo_vmware.api [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788261, 'name': PowerOffVM_Task, 'duration_secs': 0.241055} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.820790] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1505.821446] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1505.822100] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-570c8ccf-87af-4480-92c5-3461cfe8f60d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.870447] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788262, 'name': Rename_Task, 'duration_secs': 0.214578} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.870662] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1505.870909] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a8e8e8a-bf07-4a2f-8118-4d395e453efe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.875454] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1505.883038] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1505.883038] env[62816]: value = "task-1788265" [ 1505.883038] env[62816]: _type = "Task" [ 1505.883038] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.896439] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788259, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.174753} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1505.899803] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] afd02433-0912-44ef-8e0e-71d6ee8fbb41/afd02433-0912-44ef-8e0e-71d6ee8fbb41.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1505.899803] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1505.900056] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788265, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.900257] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7518379-4a58-49f2-ad08-d636a2c69248 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.909957] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1505.909957] env[62816]: value = "task-1788266" [ 1505.909957] env[62816]: _type = "Task" [ 1505.909957] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.916180] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dcb9364-93f6-4d0b-b475-a9a8f09b15b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.921838] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788266, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.926363] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4deae44-c66b-4a65-ab83-7cfaa6743408 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.958961] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ced974-5e04-48ef-aed4-6c7b752075bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.966833] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33e5116-daa2-46af-98f1-959185cb56af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.980745] env[62816]: DEBUG nova.compute.provider_tree [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.983155] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1505.983385] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1505.983582] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Deleting the datastore file [datastore1] fb84cb48-d1a1-4eec-adb8-8edc585263df {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1505.984090] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fed7101-0ea5-4533-a4af-2de6a920cb97 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.989821] env[62816]: DEBUG oslo_vmware.api [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for the task: (returnval){ [ 1505.989821] env[62816]: value = "task-1788267" [ 1505.989821] env[62816]: _type = "Task" [ 1505.989821] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1505.998891] env[62816]: INFO nova.compute.manager [-] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Took 1.57 seconds to deallocate network for instance. 
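The repeated "Waiting for the task … / Task … progress is N% / … completed successfully" records above come from the oslo.vmware task-polling loop (wait_for_task/_poll_task). The following is only a rough, self-contained sketch of that pattern, not the actual oslo_vmware.api code: `get_task_info`, `poll_interval`, and the `state`/`progress` attributes are illustrative stand-ins for what the real driver reads from the vCenter Task managed object.

```python
import logging
import time

LOG = logging.getLogger(__name__)


def wait_for_task(task_id, get_task_info, poll_interval=0.5):
    """Poll a long-running task until it finishes (simplified illustration).

    `get_task_info` is a hypothetical callable returning an object with
    `state` ("running", "success", "error") and integer `progress` fields.
    """
    LOG.debug("Waiting for the task: %s to complete.", task_id)
    while True:
        info = get_task_info(task_id)
        if info.state == "running":
            # This branch is what produces the periodic "progress is N%" lines.
            LOG.debug("Task: %s progress is %d%%.", task_id, info.progress)
            time.sleep(poll_interval)
        elif info.state == "success":
            LOG.debug("Task: %s completed successfully.", task_id)
            return info
        else:
            raise RuntimeError("Task %s failed in state %s" % (task_id, info.state))
```

Read against the trace, the same loop explains why a slow CopyVirtualDisk_Task shows several intermediate "progress is N%" records (4%, 51%, …) before its final "completed successfully" entry, while quick tasks such as ExtendVirtualDisk_Task often complete within a single poll.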
[ 1505.999198] env[62816]: DEBUG oslo_vmware.api [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.186079] env[62816]: DEBUG nova.compute.manager [req-697d2b82-4f28-41fb-a5fc-0541f1c6645a req-19ab7ab3-1cb4-44b0-bff5-380a380c666e service nova] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Received event network-vif-deleted-8cb4152c-bb70-4c55-b65d-1d1990432a62 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1506.292580] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e74dc4-002e-43b9-b2f0-130f167701e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.301678] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Doing hard reboot of VM {{(pid=62816) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1506.301960] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-288efdba-0c2b-45dd-9370-93a049afa0e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.311732] env[62816]: DEBUG oslo_vmware.api [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1506.311732] env[62816]: value = "task-1788268" [ 1506.311732] env[62816]: _type = "Task" [ 1506.311732] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.324607] env[62816]: DEBUG oslo_vmware.api [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788268, 'name': ResetVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.397920] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788265, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.419938] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788266, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.488021] env[62816]: DEBUG nova.scheduler.client.report [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1506.502011] env[62816]: DEBUG oslo_vmware.api [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Task: {'id': task-1788267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33189} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.502840] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1506.503224] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1506.503555] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1506.503834] env[62816]: INFO nova.compute.manager [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1506.504258] env[62816]: DEBUG oslo.service.loopingcall [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1506.505240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.505586] env[62816]: DEBUG nova.compute.manager [-] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1506.505804] env[62816]: DEBUG nova.network.neutron [-] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1506.822742] env[62816]: DEBUG oslo_vmware.api [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788268, 'name': ResetVM_Task, 'duration_secs': 0.104486} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.823179] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Did hard reboot of VM {{(pid=62816) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1506.823535] env[62816]: DEBUG nova.compute.manager [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1506.824806] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918d70f1-cb1e-45de-a143-3a6063a7112a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.887027] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1506.901514] env[62816]: DEBUG oslo_vmware.api [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788265, 'name': PowerOnVM_Task, 'duration_secs': 0.619485} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.901964] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1506.902417] env[62816]: INFO nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Took 10.60 seconds to spawn the instance on the hypervisor. [ 1506.903046] env[62816]: DEBUG nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1506.906262] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4104191-6307-4aae-86e1-78b266c5a7c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.918481] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1506.918750] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1506.918899] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1506.919211] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1506.919331] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 
tempest-ServerRescueTestJSON-376332962-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1506.919389] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1506.919613] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1506.919846] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1506.919914] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1506.920144] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1506.920237] env[62816]: DEBUG nova.virt.hardware [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1506.924056] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c291d6-bcfd-4092-a994-3595f0addd08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.931585] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788266, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.006153} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.933718] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1506.934542] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e694b70e-8c7f-4251-baaa-9cf90fcdb759 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.937877] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce73b87b-9e94-424f-bb29-91fa75518b1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.970027] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] afd02433-0912-44ef-8e0e-71d6ee8fbb41/afd02433-0912-44ef-8e0e-71d6ee8fbb41.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1506.970783] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca61654b-7c47-4ba9-8e00-515f795f71a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.990555] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.125s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.994331] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.377s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.995953] env[62816]: INFO nova.compute.claims [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1506.998739] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1506.998739] env[62816]: value = "task-1788269" [ 1506.998739] env[62816]: _type = "Task" [ 1506.998739] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.009890] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788269, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.025671] env[62816]: INFO nova.scheduler.client.report [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Deleted allocations for instance 99bd7579-7097-41df-a8c0-e12a3863a3dc [ 1507.341106] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b256b1a4-9843-4c7f-8142-50db8f871394 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.186s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.406379] env[62816]: DEBUG nova.compute.manager [req-9effbe1f-5d80-4185-b290-167ece4cf435 req-ea64c0dd-7a13-4155-ab8f-b2b6db54dfc1 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Received event network-vif-deleted-b6ecd005-0fec-4275-91b7-0814f3514b40 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1507.406379] env[62816]: INFO nova.compute.manager [req-9effbe1f-5d80-4185-b290-167ece4cf435 req-ea64c0dd-7a13-4155-ab8f-b2b6db54dfc1 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Neutron deleted interface b6ecd005-0fec-4275-91b7-0814f3514b40; detaching it from the instance and deleting it from the info cache [ 1507.406379] env[62816]: DEBUG nova.network.neutron [req-9effbe1f-5d80-4185-b290-167ece4cf435 req-ea64c0dd-7a13-4155-ab8f-b2b6db54dfc1 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.438150] env[62816]: INFO nova.compute.manager [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Took 46.87 seconds to build instance. [ 1507.516961] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788269, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.534155] env[62816]: DEBUG oslo_concurrency.lockutils [None req-162cb598-f907-4d68-89ba-b9105f2f95dc tempest-FloatingIPsAssociationTestJSON-1596116505 tempest-FloatingIPsAssociationTestJSON-1596116505-project-member] Lock "99bd7579-7097-41df-a8c0-e12a3863a3dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.253s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.673745] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c279a7-4ff5-bb26-27bc-1fa74d792a2c/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1507.674742] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e812d541-9f9c-44af-9a24-51bbe20a9050 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.681922] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c279a7-4ff5-bb26-27bc-1fa74d792a2c/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1507.682110] env[62816]: ERROR oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c279a7-4ff5-bb26-27bc-1fa74d792a2c/disk-0.vmdk due to incomplete transfer. [ 1507.682339] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3a15c2c1-4576-44fa-90c3-6e80e2bd1e91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.689331] env[62816]: DEBUG oslo_vmware.rw_handles [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c279a7-4ff5-bb26-27bc-1fa74d792a2c/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1507.689878] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Uploaded image c5ecd3ac-4578-4849-be1d-6f8ad2bc4a2b to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1507.694222] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1507.697680] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3543137c-0f20-4065-ab8a-72a16fc2541e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.704706] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1507.704706] env[62816]: value = "task-1788270" [ 1507.704706] env[62816]: _type = "Task" [ 1507.704706] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1507.716119] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788270, 'name': Destroy_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1507.720174] env[62816]: DEBUG nova.network.neutron [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Successfully updated port: 933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1507.848121] env[62816]: DEBUG nova.network.neutron [-] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1507.911351] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7076fcb8-ada5-420b-9370-e8f6e565de38 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.919524] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c87b528-d36a-44b7-9578-b588c03b0e32 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.940573] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f662b7f-e5b0-48df-af7b-2a2684149728 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.141s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.961293] env[62816]: DEBUG nova.compute.manager [req-9effbe1f-5d80-4185-b290-167ece4cf435 req-ea64c0dd-7a13-4155-ab8f-b2b6db54dfc1 service nova] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Detach interface failed, port_id=b6ecd005-0fec-4275-91b7-0814f3514b40, reason: Instance fb84cb48-d1a1-4eec-adb8-8edc585263df could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1508.015049] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788269, 'name': ReconfigVM_Task, 'duration_secs': 0.808709} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.015731] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Reconfigured VM instance instance-00000025 to attach disk [datastore1] afd02433-0912-44ef-8e0e-71d6ee8fbb41/afd02433-0912-44ef-8e0e-71d6ee8fbb41.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1508.016359] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-557ab0e1-4e77-45e3-a56b-5ac17ba41724 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.023177] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1508.023177] env[62816]: value = "task-1788271" [ 1508.023177] env[62816]: _type = "Task" [ 1508.023177] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.032923] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788271, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.215399] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788270, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.221593] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.221774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1508.221931] env[62816]: DEBUG nova.network.neutron [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1508.351456] env[62816]: INFO nova.compute.manager [-] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Took 1.85 seconds to deallocate network for instance. 
[ 1508.444354] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1508.490309] env[62816]: DEBUG nova.compute.manager [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Received event network-vif-plugged-933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1508.490309] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] Acquiring lock "e003e41d-93e8-4258-b8ca-3c2420b73df0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.490309] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.490519] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.490519] env[62816]: DEBUG nova.compute.manager [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] No waiting events found dispatching network-vif-plugged-933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1508.490664] env[62816]: WARNING nova.compute.manager [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Received unexpected event network-vif-plugged-933a5fba-0d17-4a7e-ba84-e96d67fb89c2 for instance with vm_state building and task_state spawning. [ 1508.490822] env[62816]: DEBUG nova.compute.manager [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Received event network-changed-933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1508.490979] env[62816]: DEBUG nova.compute.manager [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Refreshing instance network info cache due to event network-changed-933a5fba-0d17-4a7e-ba84-e96d67fb89c2. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1508.491908] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] Acquiring lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1508.537849] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788271, 'name': Rename_Task, 'duration_secs': 0.159647} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.538164] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1508.538428] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf21bd82-17c5-4262-89b3-221bcf0fc292 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.545942] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1508.545942] env[62816]: value = "task-1788272" [ 1508.545942] env[62816]: _type = "Task" [ 1508.545942] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.547619] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2061978-e140-4f08-9e82-fa17f412fb06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.561413] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b2158a-e1c1-4d1d-bdcc-f943b65816a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.566742] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788272, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.594649] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441f7038-02d2-4f97-be63-f7f02a03288f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.602601] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0275f809-050c-4bb5-aa0f-20004505196a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.616843] env[62816]: DEBUG nova.compute.provider_tree [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1508.720232] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788270, 'name': Destroy_Task, 'duration_secs': 0.688767} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1508.720500] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Destroyed the VM [ 1508.720733] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1508.720980] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-65ec1eb8-5b0e-4b76-b9ff-81cbdefd3a3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.728899] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1508.728899] env[62816]: value = "task-1788273" [ 1508.728899] env[62816]: _type = "Task" [ 1508.728899] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.741909] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788273, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.770579] env[62816]: DEBUG nova.network.neutron [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1508.864134] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1508.954402] env[62816]: DEBUG nova.network.neutron [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Updating instance_info_cache with network_info: [{"id": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "address": "fa:16:3e:66:d1:f5", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a5fba-0d", "ovs_interfaceid": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.977106] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.061917] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788272, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.099621] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "049e1f97-ab58-4797-a084-f16a7a58e2cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.100014] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.119522] env[62816]: DEBUG nova.scheduler.client.report [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1509.241675] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788273, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.440474] env[62816]: DEBUG nova.compute.manager [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Received event network-changed-64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.440726] env[62816]: DEBUG nova.compute.manager [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Refreshing instance network info cache due to event network-changed-64790bf9-4e84-424e-a85d-819c0d6cade8. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1509.441055] env[62816]: DEBUG oslo_concurrency.lockutils [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] Acquiring lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1509.441310] env[62816]: DEBUG oslo_concurrency.lockutils [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] Acquired lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.441879] env[62816]: DEBUG nova.network.neutron [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Refreshing network info cache for port 64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1509.459683] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1509.460625] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Instance network_info: |[{"id": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "address": "fa:16:3e:66:d1:f5", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a5fba-0d", "ovs_interfaceid": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1509.463072] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] Acquired lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1509.463308] env[62816]: DEBUG nova.network.neutron [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Refreshing network info cache for port 
933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1509.467656] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:d1:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '933a5fba-0d17-4a7e-ba84-e96d67fb89c2', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1509.474597] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Creating folder: Project (830fc28618ac4a31856cca469d46a750). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1509.477935] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43375818-3eb8-4535-af91-a22daed49949 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.490208] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Created folder: Project (830fc28618ac4a31856cca469d46a750) in parent group-v370905. [ 1509.490513] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Creating folder: Instances. Parent ref: group-v371017. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1509.490774] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51cf06c9-9a80-46bc-a74f-9b1ad20b6a79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.500763] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Created folder: Instances in parent group-v371017. [ 1509.502154] env[62816]: DEBUG oslo.service.loopingcall [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1509.502154] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1509.502154] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6555d4f1-3050-4c67-aec7-a090058d6d1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.522944] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1509.522944] env[62816]: value = "task-1788276" [ 1509.522944] env[62816]: _type = "Task" [ 1509.522944] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.531230] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788276, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.558507] env[62816]: DEBUG oslo_vmware.api [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788272, 'name': PowerOnVM_Task, 'duration_secs': 0.959301} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.558883] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1509.559185] env[62816]: INFO nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Took 10.60 seconds to spawn the instance on the hypervisor. [ 1509.559394] env[62816]: DEBUG nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1509.560267] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abd80ca4-e852-4a44-877d-3f7a127297d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.625197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.625197] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1509.631609] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.100s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.633101] env[62816]: INFO nova.compute.claims [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1509.672074] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.672330] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.672540] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.672722] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.672942] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.675318] env[62816]: INFO nova.compute.manager [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Terminating instance [ 1509.677163] env[62816]: DEBUG nova.compute.manager [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 
tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1509.677295] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.678123] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b73391-63e8-40ce-9e0d-4b180500b5a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.686508] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1509.686769] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71ae011f-efe7-41ea-8ac4-e8afbfa464c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.693803] env[62816]: DEBUG oslo_vmware.api [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1509.693803] env[62816]: value = "task-1788277" [ 1509.693803] env[62816]: _type = "Task" [ 1509.693803] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.703990] env[62816]: DEBUG oslo_vmware.api [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1788277, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.739644] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788273, 'name': RemoveSnapshot_Task} progress is 78%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.790752] env[62816]: DEBUG nova.network.neutron [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Updated VIF entry in instance network info cache for port 933a5fba-0d17-4a7e-ba84-e96d67fb89c2. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1509.792388] env[62816]: DEBUG nova.network.neutron [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Updating instance_info_cache with network_info: [{"id": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "address": "fa:16:3e:66:d1:f5", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a5fba-0d", "ovs_interfaceid": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1509.822310] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "d16a99df-f092-4d56-9730-852883bbdb70" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.822704] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.822952] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "d16a99df-f092-4d56-9730-852883bbdb70-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.823196] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.823491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 
tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.825734] env[62816]: INFO nova.compute.manager [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Terminating instance [ 1509.827701] env[62816]: DEBUG nova.compute.manager [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1509.827902] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.828810] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce937c1-00b3-4e7e-a433-48550f73e317 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.838462] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1509.838741] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d68c246-ae50-407b-8d54-614906f380b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.845670] env[62816]: DEBUG oslo_vmware.api [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1509.845670] env[62816]: value = "task-1788278" [ 1509.845670] env[62816]: _type = "Task" [ 1509.845670] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.854547] env[62816]: DEBUG oslo_vmware.api [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.046336] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788276, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.079888] env[62816]: INFO nova.compute.manager [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Took 39.81 seconds to build instance. [ 1510.141697] env[62816]: DEBUG nova.compute.utils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1510.143076] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1510.143246] env[62816]: DEBUG nova.network.neutron [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1510.207095] env[62816]: DEBUG nova.policy [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a3267ab64e4640bf00a0e5dbaaf044', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d830983a3c14168b8f0b67478f27589', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1510.207095] env[62816]: DEBUG oslo_vmware.api [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1788277, 'name': PowerOffVM_Task, 'duration_secs': 0.442735} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.207263] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1510.207547] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1510.208015] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0824b4b-eeac-4696-ab60-a6732c8af390 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.241077] env[62816]: DEBUG oslo_vmware.api [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788273, 'name': RemoveSnapshot_Task, 'duration_secs': 1.2607} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.241779] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1510.241779] env[62816]: INFO nova.compute.manager [None req-9aa33290-7faa-43e1-8121-1ee0658cc92a tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Took 16.91 seconds to snapshot the instance on the hypervisor. [ 1510.293933] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ced7c8d-894e-46ec-baf5-16c560ae83f4 req-d9260807-6fdc-423e-8358-33b4f48ca7c5 service nova] Releasing lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.302114] env[62816]: DEBUG nova.network.neutron [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updated VIF entry in instance network info cache for port 64790bf9-4e84-424e-a85d-819c0d6cade8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1510.302510] env[62816]: DEBUG nova.network.neutron [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [{"id": "64790bf9-4e84-424e-a85d-819c0d6cade8", "address": "fa:16:3e:51:d0:5a", "network": {"id": "3441fd9d-c039-4568-be50-67b60a32e449", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1977630699-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c6b942889914783a95c2abb080137a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64790bf9-4e", "ovs_interfaceid": "64790bf9-4e84-424e-a85d-819c0d6cade8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1510.314115] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1510.314115] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1510.314115] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Deleting the datastore file [datastore1] 0b10aca0-950b-46f6-8367-5cb9ea7540c8 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.314374] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a98eec56-643d-4723-806f-f97ca2238894 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.321794] env[62816]: DEBUG oslo_vmware.api [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for the task: (returnval){ [ 1510.321794] env[62816]: value = "task-1788280" [ 1510.321794] env[62816]: _type = "Task" [ 1510.321794] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.330805] env[62816]: DEBUG oslo_vmware.api [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1788280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.355112] env[62816]: DEBUG oslo_vmware.api [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788278, 'name': PowerOffVM_Task, 'duration_secs': 0.250357} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.355389] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1510.355560] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1510.355830] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-79cb03ed-c55e-4201-890a-254d66906d4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.537229] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788276, 'name': CreateVM_Task, 'duration_secs': 0.678136} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.537492] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1510.538107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1510.538268] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1510.538588] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1510.538835] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-197f8d93-7a50-4b9e-af8a-a3dc90a3c9a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.540625] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.543983] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1510.543983] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cffe0e-01b2-22bb-7849-d84bc7441cbf" [ 1510.543983] env[62816]: _type = "Task" [ 1510.543983] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.551852] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cffe0e-01b2-22bb-7849-d84bc7441cbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.579057] env[62816]: DEBUG nova.network.neutron [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Successfully created port: 24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1510.581738] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dada869e-dbc0-4844-ad87-a13204acd36d tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.296s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.583113] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.042s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.583464] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.583778] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.583952] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1510.586159] env[62816]: INFO nova.compute.manager [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Terminating instance [ 1510.591990] env[62816]: DEBUG nova.compute.manager [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1510.591990] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1510.593515] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb304ce2-0443-47fc-aa5b-f5d0be8e665d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.601938] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1510.603013] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa5b402f-afe2-4df4-81ff-0137f071456f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.609489] env[62816]: DEBUG oslo_vmware.api [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1510.609489] env[62816]: value = "task-1788282" [ 1510.609489] env[62816]: _type = "Task" [ 1510.609489] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.618116] env[62816]: DEBUG oslo_vmware.api [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788282, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1510.647409] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1510.805817] env[62816]: DEBUG oslo_concurrency.lockutils [req-5256f006-9600-4810-8046-79a71e312592 req-1e200ef0-62c3-4cc9-9b47-60870b6f18b2 service nova] Releasing lock "refresh_cache-d16a99df-f092-4d56-9730-852883bbdb70" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1510.820636] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1510.820849] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1510.821044] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Deleting the datastore file [datastore1] d16a99df-f092-4d56-9730-852883bbdb70 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1510.821375] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2221f1c7-5aa0-4792-8dfc-6ed8e7973985 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.834833] env[62816]: DEBUG oslo_vmware.api [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1510.834833] env[62816]: value = "task-1788283" [ 1510.834833] env[62816]: _type = "Task" [ 1510.834833] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1510.838438] env[62816]: DEBUG oslo_vmware.api [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Task: {'id': task-1788280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151533} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1510.841682] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1510.841897] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1510.842099] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.842544] env[62816]: INFO nova.compute.manager [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1510.842799] env[62816]: DEBUG oslo.service.loopingcall [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.842999] env[62816]: DEBUG nova.compute.manager [-] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1510.843102] env[62816]: DEBUG nova.network.neutron [-] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1510.852470] env[62816]: DEBUG oslo_vmware.api [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788283, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.056406] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cffe0e-01b2-22bb-7849-d84bc7441cbf, 'name': SearchDatastore_Task, 'duration_secs': 0.016658} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.056926] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.057191] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1511.057453] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.057643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.057823] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1511.062431] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8e9cb71a-d70f-4467-b3f9-5087f8730345 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.074159] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1511.074832] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1511.077027] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ee38c46-e35a-4962-950d-b8bafd934fde {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.085733] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1511.085733] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cb9c7f-7b78-4acc-c669-c989a1a48307" [ 1511.085733] env[62816]: _type = "Task" [ 1511.085733] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.086276] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1511.101522] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb9c7f-7b78-4acc-c669-c989a1a48307, 'name': SearchDatastore_Task, 'duration_secs': 0.008946} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.105743] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-930ead85-f183-4822-bc75-fcbb56c57499 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.112560] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1511.112560] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523e4667-2501-28f8-4b1b-b0e22f097d09" [ 1511.112560] env[62816]: _type = "Task" [ 1511.112560] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.130303] env[62816]: DEBUG oslo_vmware.api [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788282, 'name': PowerOffVM_Task, 'duration_secs': 0.357955} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.134808] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1511.134950] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1511.135281] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523e4667-2501-28f8-4b1b-b0e22f097d09, 'name': SearchDatastore_Task, 'duration_secs': 0.009177} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.135509] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5b8dd782-f197-449e-8cf2-0b64649fc4ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.137491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1511.137784] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/e003e41d-93e8-4258-b8ca-3c2420b73df0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1511.143423] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-007faffb-93e9-475b-be93-b04e89e00fdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.149193] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1511.149193] env[62816]: value = "task-1788285" [ 1511.149193] env[62816]: _type = "Task" [ 1511.149193] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.167200] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788285, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.195388] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a130c25-52bd-4148-b0bc-bbcea8b0cc67 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.204276] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a03dffc-bb0c-4ec8-b245-d9e663994daa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.244184] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e965578d-41cd-4cde-b6bd-788f26d24053 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.247375] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1511.247559] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1511.247861] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Deleting the datastore file [datastore1] afd02433-0912-44ef-8e0e-71d6ee8fbb41 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1511.248446] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1eaf26d4-1964-44a2-ac71-334fa002cc3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.257653] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994153bc-a10f-4706-a890-d4ffcbbea340 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.262501] env[62816]: DEBUG oslo_vmware.api [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for the task: (returnval){ [ 1511.262501] env[62816]: value = "task-1788286" [ 1511.262501] env[62816]: _type = "Task" [ 1511.262501] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.274968] env[62816]: DEBUG nova.compute.provider_tree [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1511.281852] env[62816]: DEBUG oslo_vmware.api [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788286, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.349223] env[62816]: DEBUG oslo_vmware.api [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788283, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152869} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.349512] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1511.349709] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1511.350000] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1511.350275] env[62816]: INFO nova.compute.manager [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Took 1.52 seconds to destroy the instance on the hypervisor. [ 1511.350548] env[62816]: DEBUG oslo.service.loopingcall [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.351275] env[62816]: DEBUG nova.compute.manager [-] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1511.351275] env[62816]: DEBUG nova.network.neutron [-] [instance: d16a99df-f092-4d56-9730-852883bbdb70] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1511.579226] env[62816]: DEBUG nova.compute.manager [req-7767b8df-3b2b-458d-8690-18eb4356482b req-61845ecb-2198-4596-b770-fc47ffc99bfb service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Received event network-vif-deleted-625f74d1-1d6d-4ca8-90f2-5b8327963031 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.579226] env[62816]: INFO nova.compute.manager [req-7767b8df-3b2b-458d-8690-18eb4356482b req-61845ecb-2198-4596-b770-fc47ffc99bfb service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Neutron deleted interface 625f74d1-1d6d-4ca8-90f2-5b8327963031; detaching it from the instance and deleting it from the info cache [ 1511.579226] env[62816]: DEBUG nova.network.neutron [req-7767b8df-3b2b-458d-8690-18eb4356482b req-61845ecb-2198-4596-b770-fc47ffc99bfb service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.613620] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.660804] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788285, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509397} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.661541] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/e003e41d-93e8-4258-b8ca-3c2420b73df0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1511.662089] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1511.664090] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1511.669046] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-384d8b52-4a70-4540-afd4-ee93b46d2f2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.673991] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1511.673991] env[62816]: value = "task-1788287" [ 1511.673991] env[62816]: _type = "Task" [ 1511.673991] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1511.683737] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788287, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1511.706231] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1511.706509] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1511.706672] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1511.706868] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1511.707044] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1511.707202] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1511.707424] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1511.707595] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1511.707780] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 
tempest-ImagesTestJSON-1533817319-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1511.707938] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1511.708330] env[62816]: DEBUG nova.virt.hardware [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1511.709478] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531a62f5-9111-4a15-be01-672bfaa3e544 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.722619] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7527c8a6-01f7-470d-a0bc-d95a4c1678ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1511.776029] env[62816]: DEBUG oslo_vmware.api [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Task: {'id': task-1788286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472559} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1511.776029] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1511.776029] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1511.776029] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1511.776029] env[62816]: INFO nova.compute.manager [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1511.776029] env[62816]: DEBUG oslo.service.loopingcall [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1511.776029] env[62816]: DEBUG nova.compute.manager [-] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1511.776029] env[62816]: DEBUG nova.network.neutron [-] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1511.777991] env[62816]: DEBUG nova.scheduler.client.report [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1511.887595] env[62816]: DEBUG nova.compute.manager [req-98d71168-7e39-471a-9068-d957bc7738e3 req-619e4b4f-c685-4e50-9b81-0de6668ba9c5 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Received event network-vif-deleted-64790bf9-4e84-424e-a85d-819c0d6cade8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.887799] env[62816]: INFO nova.compute.manager [req-98d71168-7e39-471a-9068-d957bc7738e3 req-619e4b4f-c685-4e50-9b81-0de6668ba9c5 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Neutron deleted interface 64790bf9-4e84-424e-a85d-819c0d6cade8; detaching it from the instance and deleting it from the info cache [ 1511.887970] env[62816]: DEBUG nova.network.neutron [req-98d71168-7e39-471a-9068-d957bc7738e3 req-619e4b4f-c685-4e50-9b81-0de6668ba9c5 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.929257] env[62816]: DEBUG nova.network.neutron [-] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.072364] env[62816]: DEBUG nova.compute.manager [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1512.072364] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a75fbd-e8dd-457e-b5a0-ac4cdc361273 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.082983] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-476e41a2-d840-4770-9627-e887c33684ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.091228] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dccb693-eca8-4083-b566-8e64c5a1288b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.126322] env[62816]: DEBUG nova.compute.manager [req-7767b8df-3b2b-458d-8690-18eb4356482b req-61845ecb-2198-4596-b770-fc47ffc99bfb service nova] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Detach interface failed, port_id=625f74d1-1d6d-4ca8-90f2-5b8327963031, reason: Instance 0b10aca0-950b-46f6-8367-5cb9ea7540c8 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1512.184261] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788287, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068188} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.184573] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1512.185398] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8095a30-24b9-4e64-a4fc-750dad5dd98d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.211462] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/e003e41d-93e8-4258-b8ca-3c2420b73df0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1512.213343] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a62f30b-559f-43f2-8b62-bf85efcbf19a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.234356] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1512.234356] env[62816]: value = "task-1788288" [ 1512.234356] env[62816]: _type = "Task" [ 1512.234356] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.244392] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788288, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.284283] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1512.284922] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1512.287554] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.458s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1512.288976] env[62816]: INFO nova.compute.claims [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1512.336582] env[62816]: DEBUG nova.network.neutron [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Successfully updated port: 24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1512.368322] env[62816]: DEBUG nova.network.neutron [-] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.391045] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-81a244fa-6d08-467c-af9d-9ba815392e6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.404411] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e74b49-481b-40d2-bee3-babb4e3367d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.439109] env[62816]: INFO nova.compute.manager [-] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Took 1.60 seconds to deallocate network for instance. [ 1512.439518] env[62816]: DEBUG nova.compute.manager [req-98d71168-7e39-471a-9068-d957bc7738e3 req-619e4b4f-c685-4e50-9b81-0de6668ba9c5 service nova] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Detach interface failed, port_id=64790bf9-4e84-424e-a85d-819c0d6cade8, reason: Instance d16a99df-f092-4d56-9730-852883bbdb70 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1512.580128] env[62816]: INFO nova.compute.manager [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] instance snapshotting [ 1512.583196] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5f5498-0c64-43cc-80a8-3117a3c0de76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.602718] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05830d50-2ee6-4b2d-8861-8f97ac5890f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.658695] env[62816]: DEBUG nova.network.neutron [-] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.746676] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788288, 'name': ReconfigVM_Task, 'duration_secs': 0.309841} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1512.746969] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Reconfigured VM instance instance-00000026 to attach disk [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/e003e41d-93e8-4258-b8ca-3c2420b73df0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1512.747588] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-182da977-ebfe-42c1-8bcf-b8f554b2dd2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.754790] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1512.754790] env[62816]: value = "task-1788289" [ 1512.754790] env[62816]: _type = "Task" [ 1512.754790] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1512.764081] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788289, 'name': Rename_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1512.795772] env[62816]: DEBUG nova.compute.utils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1512.797388] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1512.797560] env[62816]: DEBUG nova.network.neutron [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1512.836553] env[62816]: DEBUG nova.policy [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6df9f788125e4f7d8885384e03488126', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '404c503542fc4480a676e84efe500cd9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1512.839907] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "refresh_cache-946dad01-c012-457d-8bfe-6395ff0aaedf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.840063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "refresh_cache-946dad01-c012-457d-8bfe-6395ff0aaedf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.840207] env[62816]: DEBUG nova.network.neutron [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.870616] env[62816]: INFO nova.compute.manager [-] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Took 1.52 seconds to deallocate network for instance. 
[ 1512.953678] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.115756] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1513.115756] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-507d75c7-0734-462b-bad4-bf51547fb003 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.131047] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1513.131047] env[62816]: value = "task-1788290" [ 1513.131047] env[62816]: _type = "Task" [ 1513.131047] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.136517] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788290, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.162271] env[62816]: INFO nova.compute.manager [-] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Took 1.39 seconds to deallocate network for instance. [ 1513.266418] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788289, 'name': Rename_Task, 'duration_secs': 0.135746} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.266714] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1513.266972] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6fa6a821-1e68-47cd-8972-68bd0ebb8caf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.275764] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1513.275764] env[62816]: value = "task-1788291" [ 1513.275764] env[62816]: _type = "Task" [ 1513.275764] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1513.289766] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788291, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.301554] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1513.377717] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.390781] env[62816]: DEBUG nova.network.neutron [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.460624] env[62816]: DEBUG nova.network.neutron [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Successfully created port: bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1513.609453] env[62816]: DEBUG nova.network.neutron [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Updating instance_info_cache with network_info: [{"id": "24da593b-3452-47bf-a201-21c83c64cae8", "address": "fa:16:3e:b1:f9:33", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24da593b-34", "ovs_interfaceid": "24da593b-3452-47bf-a201-21c83c64cae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1513.642148] 
env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788290, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1513.674929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.786292] env[62816]: DEBUG nova.compute.manager [req-2ac06d62-7e92-4366-9be8-46af6a7501ca req-ce2fb450-47e1-428d-9dde-4c49bf65ada1 service nova] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Received event network-vif-deleted-e5ca1bb3-2557-4130-9f1b-0efec87f923a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1513.789432] env[62816]: DEBUG oslo_vmware.api [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788291, 'name': PowerOnVM_Task, 'duration_secs': 0.460983} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1513.789693] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1513.789888] env[62816]: INFO nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Took 6.90 seconds to spawn the instance on the hypervisor. 
[ 1513.790080] env[62816]: DEBUG nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1513.791118] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0268ca84-46f8-41ce-b606-990b7f8e6085 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.891752] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aec0d23-d820-413e-a1f1-0cee58e5fdd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.901609] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46374515-1989-4d28-8bc2-764a4063a241 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.936764] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e280e22-40f3-44c7-b073-70c2463c1c53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.945686] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d683d5b0-56ae-4939-9d18-e0208c3520ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.962200] env[62816]: DEBUG nova.compute.provider_tree [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.965524] env[62816]: DEBUG nova.compute.manager [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Received event network-vif-plugged-24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1513.966076] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] Acquiring lock "946dad01-c012-457d-8bfe-6395ff0aaedf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1513.966076] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1513.966199] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1513.967368] env[62816]: DEBUG nova.compute.manager [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] No waiting events found dispatching network-vif-plugged-24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1513.967368] env[62816]: WARNING nova.compute.manager [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Received unexpected event network-vif-plugged-24da593b-3452-47bf-a201-21c83c64cae8 for instance with vm_state building and task_state spawning. [ 1513.967368] env[62816]: DEBUG nova.compute.manager [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Received event network-changed-24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1513.967368] env[62816]: DEBUG nova.compute.manager [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Refreshing instance network info cache due to event network-changed-24da593b-3452-47bf-a201-21c83c64cae8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1513.967368] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] Acquiring lock "refresh_cache-946dad01-c012-457d-8bfe-6395ff0aaedf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.117891] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "refresh_cache-946dad01-c012-457d-8bfe-6395ff0aaedf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.118493] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Instance network_info: |[{"id": "24da593b-3452-47bf-a201-21c83c64cae8", "address": "fa:16:3e:b1:f9:33", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap24da593b-34", "ovs_interfaceid": "24da593b-3452-47bf-a201-21c83c64cae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1514.118873] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] Acquired lock "refresh_cache-946dad01-c012-457d-8bfe-6395ff0aaedf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.119137] env[62816]: DEBUG nova.network.neutron [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Refreshing network info cache for port 24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1514.121060] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b1:f9:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24da593b-3452-47bf-a201-21c83c64cae8', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.129405] env[62816]: DEBUG oslo.service.loopingcall [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1514.132547] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.133594] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c1bc4bd1-400b-45d2-9ead-a6779ee57869 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.158472] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788290, 'name': CreateSnapshot_Task, 'duration_secs': 0.519289} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.162978] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1514.163242] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.163242] env[62816]: value = "task-1788292" [ 1514.163242] env[62816]: _type = "Task" [ 1514.163242] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.164105] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06108e8-8cdd-4234-aea3-dc5f6a50f653 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.184688] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788292, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.316020] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1514.317081] env[62816]: INFO nova.compute.manager [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Took 36.46 seconds to build instance. [ 1514.344836] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1514.345093] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1514.345252] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1514.345436] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1514.345585] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1514.345731] env[62816]: DEBUG nova.virt.hardware 
[None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1514.345943] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1514.346097] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1514.346262] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1514.346418] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1514.346585] env[62816]: DEBUG nova.virt.hardware [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1514.347733] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19098b54-5af4-41b3-aea4-d0710b43092c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.355912] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5453f9-4398-4d96-98cb-2cf04f95bb35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.470615] env[62816]: DEBUG nova.scheduler.client.report [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1514.574899] env[62816]: DEBUG nova.network.neutron [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] 
Updated VIF entry in instance network info cache for port 24da593b-3452-47bf-a201-21c83c64cae8. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1514.575150] env[62816]: DEBUG nova.network.neutron [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Updating instance_info_cache with network_info: [{"id": "24da593b-3452-47bf-a201-21c83c64cae8", "address": "fa:16:3e:b1:f9:33", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24da593b-34", "ovs_interfaceid": "24da593b-3452-47bf-a201-21c83c64cae8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.680463] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788292, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.693015] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1514.693536] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5746d2a6-47e4-4eda-ad45-0a20e28be753 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.701887] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1514.701887] env[62816]: value = "task-1788293" [ 1514.701887] env[62816]: _type = "Task" [ 1514.701887] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.710496] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788293, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.819790] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3b3c7242-3a72-47d5-b708-23a7268be483 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.983s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.975053] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.687s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.975675] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1514.980050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.464s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.985686] env[62816]: INFO nova.compute.claims [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1515.078469] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd2ad4af-2841-4ac2-b07c-93e1290cf548 req-ef3f9274-d8c4-456b-9d17-297014a72572 service nova] Releasing lock "refresh_cache-946dad01-c012-457d-8bfe-6395ff0aaedf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.181216] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788292, 'name': CreateVM_Task, 'duration_secs': 0.646865} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.181583] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1515.182390] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.182680] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.183103] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1515.183536] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2728543-9caf-4e45-a04a-2f1842b4ce3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.188099] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1515.188099] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52929303-e521-6aae-642d-29cdcb8335bb" [ 1515.188099] env[62816]: _type = "Task" [ 1515.188099] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.196434] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52929303-e521-6aae-642d-29cdcb8335bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.217939] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788293, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.322753] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1515.495266] env[62816]: DEBUG nova.compute.utils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1515.496609] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1515.496785] env[62816]: DEBUG nova.network.neutron [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1515.543658] env[62816]: DEBUG nova.network.neutron [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Successfully updated port: bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1515.561028] env[62816]: DEBUG nova.policy [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9fdfaf9360f4dbb959bf3e8bcbee731', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8179e67e019493a894cd7c67825743c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1515.588829] env[62816]: INFO nova.compute.manager [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Rescuing [ 1515.588923] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.589066] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.589241] env[62816]: DEBUG nova.network.neutron [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: 
e003e41d-93e8-4258-b8ca-3c2420b73df0] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1515.698895] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52929303-e521-6aae-642d-29cdcb8335bb, 'name': SearchDatastore_Task, 'duration_secs': 0.00947} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.699241] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.699474] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1515.699704] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.699851] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.700050] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.700317] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fdab83bf-0ee5-4d9d-b301-d3ffa69fd966 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.710654] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.710806] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1515.711805] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c02d2b6d-9418-4663-bdbb-966e0c1fc4dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.718041] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788293, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.719414] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1515.719414] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523953ee-64bc-b1d4-74b1-c5dfb437014f" [ 1515.719414] env[62816]: _type = "Task" [ 1515.719414] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.727716] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523953ee-64bc-b1d4-74b1-c5dfb437014f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.863358] env[62816]: DEBUG nova.network.neutron [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Successfully created port: c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1515.956939] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.988949] env[62816]: DEBUG nova.compute.manager [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Received event network-vif-plugged-bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1515.989423] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] Acquiring lock "48b74d52-e764-4d14-b372-fc34872205dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1515.989751] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] Lock "48b74d52-e764-4d14-b372-fc34872205dd-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.990182] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] Lock "48b74d52-e764-4d14-b372-fc34872205dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1515.990497] env[62816]: DEBUG nova.compute.manager [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] No waiting events found dispatching network-vif-plugged-bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1515.990777] env[62816]: WARNING nova.compute.manager [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Received unexpected event network-vif-plugged-bc9889f6-a785-436a-a67e-892333ea07e1 for instance with vm_state building and task_state spawning. [ 1515.991142] env[62816]: DEBUG nova.compute.manager [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Received event network-changed-bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1515.991340] env[62816]: DEBUG nova.compute.manager [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Refreshing instance network info cache due to event network-changed-bc9889f6-a785-436a-a67e-892333ea07e1. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1515.991807] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] Acquiring lock "refresh_cache-48b74d52-e764-4d14-b372-fc34872205dd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.992082] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] Acquired lock "refresh_cache-48b74d52-e764-4d14-b372-fc34872205dd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.992470] env[62816]: DEBUG nova.network.neutron [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Refreshing network info cache for port bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1516.004495] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1516.047142] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "refresh_cache-48b74d52-e764-4d14-b372-fc34872205dd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1516.220391] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788293, 'name': CloneVM_Task} progress is 95%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.235700] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523953ee-64bc-b1d4-74b1-c5dfb437014f, 'name': SearchDatastore_Task, 'duration_secs': 0.010613} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.236590] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-381fbd97-3ba2-4a55-9eac-28a850a5219c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.242111] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1516.242111] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521d788a-f0fc-9786-f4a6-37714de2cc87" [ 1516.242111] env[62816]: _type = "Task" [ 1516.242111] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.255438] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521d788a-f0fc-9786-f4a6-37714de2cc87, 'name': SearchDatastore_Task, 'duration_secs': 0.008637} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.255815] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.256504] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 946dad01-c012-457d-8bfe-6395ff0aaedf/946dad01-c012-457d-8bfe-6395ff0aaedf.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1516.256504] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08c54419-e7f3-4a00-84b8-eaaabd04cf67 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.264107] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1516.264107] env[62816]: value = "task-1788294" [ 1516.264107] env[62816]: _type = "Task" [ 1516.264107] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.274185] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788294, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.449210] env[62816]: DEBUG nova.network.neutron [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Updating instance_info_cache with network_info: [{"id": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "address": "fa:16:3e:66:d1:f5", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap933a5fba-0d", "ovs_interfaceid": "933a5fba-0d17-4a7e-ba84-e96d67fb89c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.546284] env[62816]: DEBUG nova.network.neutron [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1516.599056] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0064ed-c7e6-4111-a19b-f274f9b2c40c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.607843] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df895c2-6400-42d6-811f-bd956f9951a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.649469] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3a274a5-e7df-4333-ade2-f14644bd255b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.660564] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cae4524-f1f7-4096-a785-a4a026828c65 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.676260] env[62816]: DEBUG nova.compute.provider_tree [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1516.706454] env[62816]: DEBUG nova.network.neutron [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.719955] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788293, 'name': CloneVM_Task, 'duration_secs': 1.835696} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.720884] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Created linked-clone VM from snapshot [ 1516.721502] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ce73b7-ecb4-4128-9394-6b39d6e12e4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.730116] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Uploading image 60677bd4-81fd-4001-9e6f-2de81a3b0680 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1516.757170] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1516.757170] env[62816]: value = "vm-371022" [ 1516.757170] env[62816]: _type = "VirtualMachine" [ 1516.757170] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1516.757977] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fbc3dc5d-e301-4995-957e-96e6229b6c26 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.766980] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lease: (returnval){ [ 1516.766980] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529891ad-1ae3-f99c-9446-49fc146ef9ad" [ 1516.766980] env[62816]: _type = "HttpNfcLease" [ 1516.766980] env[62816]: } obtained for exporting VM: (result){ [ 1516.766980] env[62816]: value = "vm-371022" [ 1516.766980] env[62816]: _type = "VirtualMachine" [ 1516.766980] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1516.767277] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the lease: (returnval){ [ 1516.767277] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529891ad-1ae3-f99c-9446-49fc146ef9ad" [ 1516.767277] env[62816]: _type = "HttpNfcLease" [ 1516.767277] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1516.778101] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788294, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.779628] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1516.779628] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529891ad-1ae3-f99c-9446-49fc146ef9ad" [ 1516.779628] env[62816]: _type = "HttpNfcLease" [ 1516.779628] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1516.950507] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "refresh_cache-e003e41d-93e8-4258-b8ca-3c2420b73df0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.021736] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1517.043353] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1517.043621] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1517.043799] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1517.044091] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1517.044262] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] 
Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1517.044411] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1517.044662] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1517.044831] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1517.045008] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1517.045183] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1517.045361] env[62816]: DEBUG nova.virt.hardware [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1517.046281] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7ed71e-a887-4b2b-8e94-d854e12a8e86 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.058289] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f84f819a-87bc-4450-a190-139d07cf4410 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.179656] env[62816]: DEBUG nova.scheduler.client.report [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1517.209094] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d473e10-66c5-46c0-a076-103f831e8fb2 req-b188ed26-0613-44f0-a73a-00af42bbaf20 service nova] Releasing lock "refresh_cache-48b74d52-e764-4d14-b372-fc34872205dd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.209477] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquired lock "refresh_cache-48b74d52-e764-4d14-b372-fc34872205dd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.209641] env[62816]: DEBUG nova.network.neutron [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1517.280259] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788294, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525153} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.281828] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 946dad01-c012-457d-8bfe-6395ff0aaedf/946dad01-c012-457d-8bfe-6395ff0aaedf.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1517.282068] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1517.282614] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1517.282614] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529891ad-1ae3-f99c-9446-49fc146ef9ad" [ 1517.282614] env[62816]: _type = "HttpNfcLease" [ 1517.282614] env[62816]: } is ready. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1517.282805] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fad66a1d-1a37-4d65-8cd2-956b0e816cf1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.284670] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1517.284670] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529891ad-1ae3-f99c-9446-49fc146ef9ad" [ 1517.284670] env[62816]: _type = "HttpNfcLease" [ 1517.284670] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1517.285379] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d55236b2-4f89-4f93-83a0-8feaa9b5db3d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.293288] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528bd63c-0516-d00a-6b8c-b7bf255dd0d7/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1517.293498] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528bd63c-0516-d00a-6b8c-b7bf255dd0d7/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1517.296013] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1517.296013] env[62816]: value = "task-1788296" [ 1517.296013] env[62816]: _type = "Task" [ 1517.296013] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.362500] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788296, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.479748] env[62816]: DEBUG nova.compute.manager [req-325cebfe-afc3-45f0-972a-b49a88388c99 req-ddbfabc8-cce5-490b-a977-44b15074168d service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Received event network-vif-plugged-c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.479964] env[62816]: DEBUG oslo_concurrency.lockutils [req-325cebfe-afc3-45f0-972a-b49a88388c99 req-ddbfabc8-cce5-490b-a977-44b15074168d service nova] Acquiring lock "ba6e94c9-eb58-4040-8e28-f255961e76ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.480286] env[62816]: DEBUG oslo_concurrency.lockutils [req-325cebfe-afc3-45f0-972a-b49a88388c99 req-ddbfabc8-cce5-490b-a977-44b15074168d service nova] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.480458] env[62816]: DEBUG oslo_concurrency.lockutils [req-325cebfe-afc3-45f0-972a-b49a88388c99 req-ddbfabc8-cce5-490b-a977-44b15074168d service nova] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.480539] env[62816]: DEBUG nova.compute.manager [req-325cebfe-afc3-45f0-972a-b49a88388c99 req-ddbfabc8-cce5-490b-a977-44b15074168d service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] No waiting events found dispatching network-vif-plugged-c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1517.480670] env[62816]: WARNING nova.compute.manager [req-325cebfe-afc3-45f0-972a-b49a88388c99 req-ddbfabc8-cce5-490b-a977-44b15074168d service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Received unexpected event network-vif-plugged-c4be71b6-096d-43a8-9cf0-74f91c97d74c for instance with vm_state building and task_state spawning. [ 1517.481252] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1517.481751] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1198416f-997c-43ed-8f64-99128cd3c518 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.489597] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1517.489597] env[62816]: value = "task-1788297" [ 1517.489597] env[62816]: _type = "Task" [ 1517.489597] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.499260] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.513068] env[62816]: DEBUG nova.network.neutron [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Successfully updated port: c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1517.514387] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-407d3798-e896-4ca1-9e5f-b9e1b6c978d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.556767] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-6767c231-2dcb-4d19-ae7c-5b026d48ed26-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.557040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-6767c231-2dcb-4d19-ae7c-5b026d48ed26-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.557406] env[62816]: DEBUG nova.objects.instance [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'flavor' on Instance uuid 6767c231-2dcb-4d19-ae7c-5b026d48ed26 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1517.684984] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.705s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.685637] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1517.688993] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.185s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.689287] env[62816]: DEBUG nova.objects.instance [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lazy-loading 'resources' on Instance uuid f1914aaa-1f3d-48b7-a6d2-ceea16dc786a {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1517.746048] env[62816]: DEBUG nova.network.neutron [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1517.808245] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788296, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.408309} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.808513] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1517.809417] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c66848-d6fd-406e-890e-c21e8add4b4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.833702] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 946dad01-c012-457d-8bfe-6395ff0aaedf/946dad01-c012-457d-8bfe-6395ff0aaedf.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1517.836736] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bd6f876-11ef-4411-9b35-309c79bc83ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.858299] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1517.858299] env[62816]: value = "task-1788298" [ 1517.858299] env[62816]: _type = "Task" [ 1517.858299] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.867890] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788298, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.982180] env[62816]: DEBUG nova.network.neutron [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Updating instance_info_cache with network_info: [{"id": "bc9889f6-a785-436a-a67e-892333ea07e1", "address": "fa:16:3e:91:ca:8a", "network": {"id": "c2ce14ed-ae02-458d-a472-ac53284bad98", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-447168174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "404c503542fc4480a676e84efe500cd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9889f6-a7", "ovs_interfaceid": "bc9889f6-a785-436a-a67e-892333ea07e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.002418] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788297, 'name': PowerOffVM_Task, 'duration_secs': 0.189765} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.002716] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1518.003725] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1437ef-ccc3-4690-878b-3f6f925a35e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.031115] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "refresh_cache-ba6e94c9-eb58-4040-8e28-f255961e76ca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.031270] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "refresh_cache-ba6e94c9-eb58-4040-8e28-f255961e76ca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.031429] env[62816]: DEBUG nova.network.neutron [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.033312] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb03c516-074c-44ee-a4ee-8273355c72b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.073993] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1518.074445] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-855726af-c01a-4556-9198-d853899f6211 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.082276] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1518.082276] env[62816]: value = "task-1788299" [ 1518.082276] env[62816]: _type = "Task" [ 1518.082276] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.094305] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1518.094557] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.095062] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.095737] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.096049] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.096628] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cf728086-3c00-4167-a28f-500f921fc319 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.142267] env[62816]: DEBUG nova.objects.instance [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'pci_requests' on Instance uuid 6767c231-2dcb-4d19-ae7c-5b026d48ed26 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1518.193742] env[62816]: DEBUG nova.compute.utils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1518.199046] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1518.199326] env[62816]: DEBUG nova.network.neutron [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1518.263392] env[62816]: DEBUG nova.policy [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b8b117d0fbd4db2873e88e691bff621', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7931dc5c9a614764a02086f070df1b00', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1518.375062] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788298, 'name': ReconfigVM_Task, 'duration_secs': 0.452811} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.375555] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 946dad01-c012-457d-8bfe-6395ff0aaedf/946dad01-c012-457d-8bfe-6395ff0aaedf.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.376959] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-015415b4-7377-4961-9db0-f9a27bcf96e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.385688] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1518.385688] env[62816]: value = "task-1788300" [ 1518.385688] env[62816]: _type = "Task" [ 1518.385688] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.396667] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788300, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.461716] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.461937] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.462803] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-376b4798-3e42-4e3b-b76e-532f01f13599 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.476049] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1518.476049] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fa11b2-e3f5-cc08-5284-15a227e55087" [ 1518.476049] env[62816]: _type = "Task" [ 1518.476049] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.483776] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Releasing lock "refresh_cache-48b74d52-e764-4d14-b372-fc34872205dd" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.485170] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Instance network_info: |[{"id": "bc9889f6-a785-436a-a67e-892333ea07e1", "address": "fa:16:3e:91:ca:8a", "network": {"id": "c2ce14ed-ae02-458d-a472-ac53284bad98", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-447168174-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "404c503542fc4480a676e84efe500cd9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fb224918-e294-4b76-80f9-2fa0031b7dc2", "external-id": "nsx-vlan-transportzone-876", "segmentation_id": 876, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc9889f6-a7", "ovs_interfaceid": "bc9889f6-a785-436a-a67e-892333ea07e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1518.485170] env[62816]: DEBUG oslo_vmware.api [None 
req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fa11b2-e3f5-cc08-5284-15a227e55087, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.485170] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:ca:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fb224918-e294-4b76-80f9-2fa0031b7dc2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc9889f6-a785-436a-a67e-892333ea07e1', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1518.494306] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Creating folder: Project (404c503542fc4480a676e84efe500cd9). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1518.498073] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf604ad6-fe89-495f-a0e3-bf35166957c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.509872] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Created folder: Project (404c503542fc4480a676e84efe500cd9) in parent group-v370905. [ 1518.510100] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Creating folder: Instances. Parent ref: group-v371023. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1518.510370] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17f9c42a-a9d3-47f1-8761-db8d5aadf97f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.525423] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Created folder: Instances in parent group-v371023. [ 1518.525715] env[62816]: DEBUG oslo.service.loopingcall [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1518.525905] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1518.526243] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1691c16c-5c3c-412b-8308-c1216584de0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.552950] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1518.552950] env[62816]: value = "task-1788303" [ 1518.552950] env[62816]: _type = "Task" [ 1518.552950] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.567131] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788303, 'name': CreateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.605480] env[62816]: DEBUG nova.network.neutron [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1518.609416] env[62816]: DEBUG nova.network.neutron [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Successfully created port: fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.646099] env[62816]: DEBUG nova.objects.base [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Object Instance<6767c231-2dcb-4d19-ae7c-5b026d48ed26> lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1518.649022] env[62816]: DEBUG nova.network.neutron [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1518.703621] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1518.749138] env[62816]: DEBUG nova.policy [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1518.839253] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-546135b4-2fc9-4ecb-8bf6-9bac9286552f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.847456] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0f1736-1f00-48c9-b942-ec4fbbc0b03f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.882854] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba042c9-545c-4da9-ae0d-4c33a9ea6ef5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.892676] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958aa3a5-fc54-4566-b8ed-0e5c66846fe8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.899612] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788300, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.911703] env[62816]: DEBUG nova.compute.provider_tree [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.971939] env[62816]: DEBUG nova.network.neutron [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Updating instance_info_cache with network_info: [{"id": "c4be71b6-096d-43a8-9cf0-74f91c97d74c", "address": "fa:16:3e:cf:db:de", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4be71b6-09", "ovs_interfaceid": "c4be71b6-096d-43a8-9cf0-74f91c97d74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.986482] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fa11b2-e3f5-cc08-5284-15a227e55087, 'name': SearchDatastore_Task, 'duration_secs': 0.012685} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.988045] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53b57d67-6757-4d7b-951a-2d4257252c1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.994386] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1518.994386] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527dd689-2f89-7ea5-fcdf-da4762018a63" [ 1518.994386] env[62816]: _type = "Task" [ 1518.994386] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.003697] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527dd689-2f89-7ea5-fcdf-da4762018a63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.064276] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788303, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.215411] env[62816]: DEBUG nova.network.neutron [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Successfully created port: ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1519.396560] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788300, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.415769] env[62816]: DEBUG nova.scheduler.client.report [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1519.475251] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "refresh_cache-ba6e94c9-eb58-4040-8e28-f255961e76ca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.475621] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Instance network_info: |[{"id": "c4be71b6-096d-43a8-9cf0-74f91c97d74c", "address": "fa:16:3e:cf:db:de", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4be71b6-09", "ovs_interfaceid": "c4be71b6-096d-43a8-9cf0-74f91c97d74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1519.476136] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:db:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4be71b6-096d-43a8-9cf0-74f91c97d74c', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1519.485508] env[62816]: DEBUG oslo.service.loopingcall [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.487029] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1519.487029] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8cf6b555-3c22-4724-b739-2c83beff0ece {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.515960] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527dd689-2f89-7ea5-fcdf-da4762018a63, 'name': SearchDatastore_Task, 'duration_secs': 0.00954} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.517366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.517725] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. {{(pid=62816) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1519.518081] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1519.518081] env[62816]: value = "task-1788304" [ 1519.518081] env[62816]: _type = "Task" [ 1519.518081] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.518518] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83fd89c3-fbff-43cf-b3e1-53a987fdcb59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.532067] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788304, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.533436] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1519.533436] env[62816]: value = "task-1788305" [ 1519.533436] env[62816]: _type = "Task" [ 1519.533436] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.542192] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788305, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.564728] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788303, 'name': CreateVM_Task, 'duration_secs': 0.532955} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.564928] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1519.565720] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.565919] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.566289] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1519.566647] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-709b7520-28e7-412d-9bd3-94356a330410 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.572087] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1519.572087] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528f659c-9bb2-eda2-84de-8fcead9ee6a4" [ 1519.572087] env[62816]: _type = "Task" [ 1519.572087] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.580698] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528f659c-9bb2-eda2-84de-8fcead9ee6a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.713155] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1519.736636] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1519.737271] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1519.737271] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1519.737271] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1519.737428] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1519.737532] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1519.739099] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1519.739099] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1519.739099] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1519.739099] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1519.739099] env[62816]: DEBUG nova.virt.hardware [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1519.739317] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b18ddb-f9a3-4c9a-b25f-2630c74dd649 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.748044] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1936e559-acde-4402-9654-0eb56ee6700a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.898618] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788300, 'name': Rename_Task, 'duration_secs': 1.30241} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.898953] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1519.899242] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89699cf9-d935-4abd-a2de-6f3d809db644 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.906089] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1519.906089] env[62816]: value = "task-1788306" [ 1519.906089] env[62816]: _type = "Task" [ 1519.906089] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.917080] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788306, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.921052] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.923461] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 25.567s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.945455] env[62816]: INFO nova.scheduler.client.report [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Deleted allocations for instance f1914aaa-1f3d-48b7-a6d2-ceea16dc786a [ 1520.035736] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788304, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.048471] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788305, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.084187] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528f659c-9bb2-eda2-84de-8fcead9ee6a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009357} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.084560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1520.084878] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1520.085167] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.085320] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.085720] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1520.085829] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3aa07191-b2d0-4002-ad79-524f389cf6e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.106732] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1520.107074] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1520.108196] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086d5ce4-5b31-4796-a8dd-fe6b01f42e0b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.115252] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1520.115252] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525fb84b-b014-e4fc-cd7a-b75a0f076a52" [ 1520.115252] env[62816]: _type = "Task" [ 1520.115252] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.125495] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525fb84b-b014-e4fc-cd7a-b75a0f076a52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.187770] env[62816]: DEBUG nova.network.neutron [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Successfully updated port: fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.268362] env[62816]: DEBUG nova.compute.manager [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Received event network-changed-c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1520.268362] env[62816]: DEBUG nova.compute.manager [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Refreshing instance network info cache due to event network-changed-c4be71b6-096d-43a8-9cf0-74f91c97d74c. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1520.268911] env[62816]: DEBUG oslo_concurrency.lockutils [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] Acquiring lock "refresh_cache-ba6e94c9-eb58-4040-8e28-f255961e76ca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.269244] env[62816]: DEBUG oslo_concurrency.lockutils [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] Acquired lock "refresh_cache-ba6e94c9-eb58-4040-8e28-f255961e76ca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.269568] env[62816]: DEBUG nova.network.neutron [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Refreshing network info cache for port c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1520.295404] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1520.295404] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1520.416483] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788306, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.455123] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6df11d22-3dfe-4eb7-9857-f8a2a9b75bb5 tempest-ServersTestBootFromVolume-2087831775 tempest-ServersTestBootFromVolume-2087831775-project-member] Lock "f1914aaa-1f3d-48b7-a6d2-ceea16dc786a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.323s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1520.530981] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788304, 'name': CreateVM_Task, 'duration_secs': 0.638167} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.531731] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1520.532184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.532184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.532517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1520.532885] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-187038b2-f8e1-438c-be71-a890defff2cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.541590] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1520.541590] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fde4e9-5974-6369-f534-4cdeea2f2bc5" [ 1520.541590] env[62816]: _type = "Task" [ 1520.541590] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.548637] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788305, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.553859] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fde4e9-5974-6369-f534-4cdeea2f2bc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.628293] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525fb84b-b014-e4fc-cd7a-b75a0f076a52, 'name': SearchDatastore_Task, 'duration_secs': 0.213242} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.629133] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20fe49e9-4861-49cc-90bc-52392c71b8c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.635043] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1520.635043] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52855eff-caa8-0ac6-64e7-10bac30a5b80" [ 1520.635043] env[62816]: _type = "Task" [ 1520.635043] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.643603] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52855eff-caa8-0ac6-64e7-10bac30a5b80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.691097] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1520.691220] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1520.691384] env[62816]: DEBUG nova.network.neutron [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1520.877258] env[62816]: DEBUG nova.network.neutron [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Successfully updated port: ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1520.919487] env[62816]: DEBUG oslo_vmware.api [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': 
task-1788306, 'name': PowerOnVM_Task, 'duration_secs': 0.5921} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.919833] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1520.919962] env[62816]: INFO nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1520.920163] env[62816]: DEBUG nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1520.921403] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961f3e3c-7dcd-4b5e-9a25-1a8bb5066e2b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.966980] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0b10aca0-950b-46f6-8367-5cb9ea7540c8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.967171] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 11a4d835-c149-49f0-8e4f-b3f9a7f1afca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.967317] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f6ddaab3-d420-4ee4-bf75-486228826635 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.967447] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance fb84cb48-d1a1-4eec-adb8-8edc585263df is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.967570] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 679cd9a3-2ed6-451f-b934-ba7738913959 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.967691] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.967806] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 42093232-a4e5-4cc3-ab1c-a0023a91e102 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.967929] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance cf6ff174-1324-42bd-a77a-905b9a333c27 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.968054] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 6767c231-2dcb-4d19-ae7c-5b026d48ed26 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968171] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0e0261fe-4376-487c-9d54-c4f37577409c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968285] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0a1a8539-940a-4a17-9826-82736be41892 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968399] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1c3392d3-cfb0-47c6-9366-8c363ad21297 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968513] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance e1067d45-1938-4021-b902-21a1aa57058a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968629] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968737] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.968857] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c6dc008c-6336-4271-9635-a7e0652138e0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.968980] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b409568f-6e04-4218-8a7b-1bbf785115c3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.969109] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance d16a99df-f092-4d56-9730-852883bbdb70 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.969286] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.969343] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance afd02433-0912-44ef-8e0e-71d6ee8fbb41 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1520.969445] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance e003e41d-93e8-4258-b8ca-3c2420b73df0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.969554] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 946dad01-c012-457d-8bfe-6395ff0aaedf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.969665] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 48b74d52-e764-4d14-b372-fc34872205dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.969774] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance ba6e94c9-eb58-4040-8e28-f255961e76ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.969882] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0dbf907f-0313-435c-a8be-19f7e48ded76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1520.981049] env[62816]: DEBUG nova.network.neutron [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Updated VIF entry in instance network info cache for port c4be71b6-096d-43a8-9cf0-74f91c97d74c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1520.981410] env[62816]: DEBUG nova.network.neutron [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Updating instance_info_cache with network_info: [{"id": "c4be71b6-096d-43a8-9cf0-74f91c97d74c", "address": "fa:16:3e:cf:db:de", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4be71b6-09", "ovs_interfaceid": "c4be71b6-096d-43a8-9cf0-74f91c97d74c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.046674] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788305, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.194331} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.049714] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. [ 1521.050484] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42527a78-22fb-470d-aba4-2c5831b22f74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.059064] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fde4e9-5974-6369-f534-4cdeea2f2bc5, 'name': SearchDatastore_Task, 'duration_secs': 0.139967} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.074917] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.075320] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1521.075577] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.085092] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1521.085541] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09044d51-2efb-4ed5-8246-6a5ce5fc62cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.107524] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1521.107524] env[62816]: value = "task-1788307" [ 1521.107524] env[62816]: _type = "Task" [ 1521.107524] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.116803] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788307, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.144514] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52855eff-caa8-0ac6-64e7-10bac30a5b80, 'name': SearchDatastore_Task, 'duration_secs': 0.073597} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.144795] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.145067] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 48b74d52-e764-4d14-b372-fc34872205dd/48b74d52-e764-4d14-b372-fc34872205dd.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1521.145357] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.145524] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1521.145815] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e68ff49-a399-4861-8ad5-c00cf506369d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.147810] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-356f4e7f-c874-47cb-a3f9-01abb82f22c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.153771] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1521.153771] env[62816]: value = "task-1788308" [ 1521.153771] env[62816]: _type = "Task" [ 1521.153771] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.157674] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1521.158010] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1521.158874] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44300bec-7ed6-4a0b-b21b-119588484e0a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.165429] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.168389] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1521.168389] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520640e5-45ed-664a-a706-fee99ce78905" [ 1521.168389] env[62816]: _type = "Task" [ 1521.168389] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.175812] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520640e5-45ed-664a-a706-fee99ce78905, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.212440] env[62816]: DEBUG nova.compute.manager [req-eee2dbaf-a1fd-455d-b274-27f746385b6a req-35948c94-cf24-412b-8376-45394ef29b09 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-vif-plugged-ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1521.212440] env[62816]: DEBUG oslo_concurrency.lockutils [req-eee2dbaf-a1fd-455d-b274-27f746385b6a req-35948c94-cf24-412b-8376-45394ef29b09 service nova] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.212440] env[62816]: DEBUG oslo_concurrency.lockutils [req-eee2dbaf-a1fd-455d-b274-27f746385b6a req-35948c94-cf24-412b-8376-45394ef29b09 service nova] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.212440] env[62816]: DEBUG oslo_concurrency.lockutils [req-eee2dbaf-a1fd-455d-b274-27f746385b6a req-35948c94-cf24-412b-8376-45394ef29b09 service nova] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.212440] env[62816]: DEBUG nova.compute.manager [req-eee2dbaf-a1fd-455d-b274-27f746385b6a req-35948c94-cf24-412b-8376-45394ef29b09 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] No waiting events found dispatching 
network-vif-plugged-ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1521.213151] env[62816]: WARNING nova.compute.manager [req-eee2dbaf-a1fd-455d-b274-27f746385b6a req-35948c94-cf24-412b-8376-45394ef29b09 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received unexpected event network-vif-plugged-ae939699-528f-4716-8d38-8dc982cef0b3 for instance with vm_state active and task_state None. [ 1521.225298] env[62816]: DEBUG nova.network.neutron [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1521.364139] env[62816]: DEBUG nova.network.neutron [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1521.378891] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.379182] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.379416] env[62816]: DEBUG nova.network.neutron [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Building network info cache for instance {{(pid=62816) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1521.439029] env[62816]: INFO nova.compute.manager [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Took 36.84 seconds to build instance. [ 1521.473633] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9bda24c6-f950-47ff-ad3c-ff745291870c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1521.484273] env[62816]: DEBUG oslo_concurrency.lockutils [req-802dc3bd-6414-4b03-b80a-849441f17333 req-0a752906-6623-4c69-b355-9e395e5d2a24 service nova] Releasing lock "refresh_cache-ba6e94c9-eb58-4040-8e28-f255961e76ca" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.618148] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788307, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.663327] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788308, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.678608] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520640e5-45ed-664a-a706-fee99ce78905, 'name': SearchDatastore_Task, 'duration_secs': 0.009055} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.679432] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba5c4e29-b752-45c9-b54b-688112b4a174 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.685019] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1521.685019] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52814ca5-a7d1-49ee-bf59-a5901d7d0d57" [ 1521.685019] env[62816]: _type = "Task" [ 1521.685019] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.693393] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52814ca5-a7d1-49ee-bf59-a5901d7d0d57, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.866910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.867294] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Instance network_info: |[{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1521.867755] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:6b:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c0b05e-6d10-474c-9173-4c8f1dacac9f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe984819-7451-4e21-be74-349cfccd5318', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1521.877157] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Creating folder: Project (7931dc5c9a614764a02086f070df1b00). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1521.877705] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-967d55b2-b772-47f3-877a-84db8a6ce2cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.893191] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Created folder: Project (7931dc5c9a614764a02086f070df1b00) in parent group-v370905. [ 1521.893469] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Creating folder: Instances. Parent ref: group-v371027. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1521.893808] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-acbf3858-9814-4426-b5a5-999038cacf3f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.909018] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Created folder: Instances in parent group-v371027. [ 1521.909018] env[62816]: DEBUG oslo.service.loopingcall [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1521.909018] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1521.909018] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd342284-460f-4914-b3df-0ad6a8c4918c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.929695] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1521.929695] env[62816]: value = "task-1788311" [ 1521.929695] env[62816]: _type = "Task" [ 1521.929695] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.933536] env[62816]: WARNING nova.network.neutron [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] 31f515cd-2053-4577-9ed5-9de5fe666946 already exists in list: networks containing: ['31f515cd-2053-4577-9ed5-9de5fe666946']. 
ignoring it [ 1521.943224] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f47a2b42-23ab-442e-8d96-1151c5f7718d tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.434s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.943224] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788311, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.976712] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1522.119383] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788307, 'name': ReconfigVM_Task, 'duration_secs': 0.606895} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.119745] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Reconfigured VM instance instance-00000026 to attach disk [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1522.120728] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac8f10b-d055-4b09-b4a2-33951e7467dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.155024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-669139f5-7523-4532-8eb3-52aa146385eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.175506] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788308, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.176953] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1522.176953] env[62816]: value = "task-1788312" [ 1522.176953] env[62816]: _type = "Task" [ 1522.176953] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.185708] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788312, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.197536] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52814ca5-a7d1-49ee-bf59-a5901d7d0d57, 'name': SearchDatastore_Task, 'duration_secs': 0.231583} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.197775] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.198064] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ba6e94c9-eb58-4040-8e28-f255961e76ca/ba6e94c9-eb58-4040-8e28-f255961e76ca.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1522.198476] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c1ef995-1792-44d1-9ef5-28f852eb8fee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.204814] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1522.204814] env[62816]: value = "task-1788313" [ 1522.204814] env[62816]: _type = "Task" [ 1522.204814] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.213118] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.304101] env[62816]: DEBUG nova.network.neutron [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ae939699-528f-4716-8d38-8dc982cef0b3", "address": "fa:16:3e:c6:59:70", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae939699-52", "ovs_interfaceid": "ae939699-528f-4716-8d38-8dc982cef0b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.438984] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788311, 'name': CreateVM_Task, 'duration_secs': 0.432154} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.439178] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1522.439862] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.440040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.440393] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1522.440682] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63f2602f-830d-4ff5-9290-7babb1b05b3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.445493] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1522.447990] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1522.447990] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521ff896-eecd-7c9e-d735-c50bb38b4de6" [ 1522.447990] env[62816]: _type = "Task" [ 1522.447990] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.457289] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521ff896-eecd-7c9e-d735-c50bb38b4de6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.480113] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa39281-b46d-4c4b-b136-11e3003e7834 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.484747] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1056fc6e-af1e-4d63-a9ce-9ade4dd73891 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1522.494022] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Suspending the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1522.494301] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-0e4c624b-9853-4846-8961-ad144ef0393a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.506015] env[62816]: DEBUG oslo_vmware.api [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1522.506015] env[62816]: value = "task-1788314" [ 1522.506015] env[62816]: _type = "Task" [ 1522.506015] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.514791] env[62816]: DEBUG oslo_vmware.api [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788314, 'name': SuspendVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.676236] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788308, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.198314} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.676530] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 48b74d52-e764-4d14-b372-fc34872205dd/48b74d52-e764-4d14-b372-fc34872205dd.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1522.676746] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1522.677014] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d30c48ff-15cb-43cd-a570-844ea2275619 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.687375] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788312, 'name': ReconfigVM_Task, 'duration_secs': 0.230169} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.688795] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.689203] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1522.689203] env[62816]: value = "task-1788315" [ 1522.689203] env[62816]: _type = "Task" [ 1522.689203] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.689413] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fbf28b4-8e86-4612-a338-c965416c77bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.700144] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.701685] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1522.701685] env[62816]: value = "task-1788316" [ 1522.701685] env[62816]: _type = "Task" [ 1522.701685] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.717422] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788316, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.719475] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.811022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.811985] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.812269] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.813341] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb6ffe5-fe4b-46f3-9c83-7ff32af3162d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e 
tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1522.835405] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1522.835673] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1522.835937] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1522.836131] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1522.836361] env[62816]: DEBUG nova.virt.hardware [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1522.843217] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Reconfiguring VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1522.843687] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e85c9dfc-bbef-4aa4-bce9-bfb54bdd7e46 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.863203] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1522.863203] env[62816]: value = "task-1788317" [ 1522.863203] env[62816]: _type = "Task" [ 1522.863203] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.871685] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.963146] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521ff896-eecd-7c9e-d735-c50bb38b4de6, 'name': SearchDatastore_Task, 'duration_secs': 0.020185} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.963703] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.964046] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1522.964046] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.964211] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.964349] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.964632] env[62816]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7704da6e-b4c6-4803-8024-10bd3a21547e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.978897] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.986989] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.987217] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1522.988337] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5efbbf-6632-44c1-aa05-05020e199b66 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.991083] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f9d9593a-1c25-47a1-98fd-4462a851f134 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1522.995670] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1522.995670] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0" [ 1522.995670] env[62816]: _type = "Task" [ 1522.995670] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.004348] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.016378] env[62816]: DEBUG oslo_vmware.api [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788314, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.201600] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.215112] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788316, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.217792] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.344149] env[62816]: DEBUG nova.compute.manager [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received event network-vif-plugged-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1523.344422] env[62816]: DEBUG oslo_concurrency.lockutils [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] Acquiring lock "0dbf907f-0313-435c-a8be-19f7e48ded76-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.344685] env[62816]: DEBUG oslo_concurrency.lockutils [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.344899] env[62816]: DEBUG oslo_concurrency.lockutils [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.345148] env[62816]: DEBUG nova.compute.manager [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] No waiting events found dispatching network-vif-plugged-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1523.345375] env[62816]: WARNING nova.compute.manager [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received unexpected event network-vif-plugged-fe984819-7451-4e21-be74-349cfccd5318 for instance with vm_state building and task_state spawning. 
[ 1523.347615] env[62816]: DEBUG nova.compute.manager [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received event network-changed-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1523.347615] env[62816]: DEBUG nova.compute.manager [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing instance network info cache due to event network-changed-fe984819-7451-4e21-be74-349cfccd5318. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1523.347615] env[62816]: DEBUG oslo_concurrency.lockutils [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.347615] env[62816]: DEBUG oslo_concurrency.lockutils [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.347615] env[62816]: DEBUG nova.network.neutron [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing network info cache for port fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1523.374986] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.495166] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 83f7b5b8-228b-4d17-ab52-8df65fe247e3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1523.506161] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.517581] env[62816]: DEBUG oslo_vmware.api [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788314, 'name': SuspendVM_Task, 'duration_secs': 0.963864} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.517863] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Suspended the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1523.518057] env[62816]: DEBUG nova.compute.manager [None req-675f7a89-9ded-4fdd-9083-b7c12e36bf90 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1523.518834] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e2709a-cb3a-4475-8e9c-6242c63d70fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.702625] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.716329] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788316, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.718279] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.875584] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.008166] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance a01e772c-dafe-4091-bae6-f9f59d5c972d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.012840] env[62816]: DEBUG nova.compute.manager [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-changed-ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1524.013039] env[62816]: DEBUG nova.compute.manager [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Refreshing instance network info cache due to event network-changed-ae939699-528f-4716-8d38-8dc982cef0b3. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1524.013256] env[62816]: DEBUG oslo_concurrency.lockutils [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] Acquiring lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.013400] env[62816]: DEBUG oslo_concurrency.lockutils [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] Acquired lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.013558] env[62816]: DEBUG nova.network.neutron [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Refreshing network info cache for port ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1524.026941] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.136646] env[62816]: DEBUG nova.network.neutron [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updated VIF entry in instance network info cache for port fe984819-7451-4e21-be74-349cfccd5318. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1524.137019] env[62816]: DEBUG nova.network.neutron [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.201689] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.215905] env[62816]: DEBUG oslo_vmware.api [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788316, 'name': PowerOnVM_Task, 'duration_secs': 1.109801} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.219118] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1524.220795] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.221789] env[62816]: DEBUG nova.compute.manager [None req-ff0c3ffa-4228-4366-a770-6122f7f84747 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1524.222557] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a08b661-5f21-4e88-8e61-a0506948dfcd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.375547] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.510125] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.524278] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 049e1f97-ab58-4797-a084-f16a7a58e2cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1524.639237] env[62816]: DEBUG oslo_concurrency.lockutils [req-a44692b6-b216-4fec-8a92-f064a3895261 req-fa3db9ac-f958-4055-bbc2-29da77537558 service nova] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.702541] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.718787] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.750163] env[62816]: DEBUG nova.network.neutron [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updated VIF entry in instance network info cache for port ae939699-528f-4716-8d38-8dc982cef0b3. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1524.750692] env[62816]: DEBUG nova.network.neutron [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ae939699-528f-4716-8d38-8dc982cef0b3", "address": "fa:16:3e:c6:59:70", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapae939699-52", "ovs_interfaceid": "ae939699-528f-4716-8d38-8dc982cef0b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.856830] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1524.857094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock 
"1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.877329] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.011086] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.027170] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 3c4cca03-b2ee-48a2-9a15-a21124bd6599 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1525.027568] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1525.027672] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1525.204517] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.217905] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.255093] env[62816]: DEBUG oslo_concurrency.lockutils [req-43c4fda8-5c79-4b20-8728-4d624934a675 req-d732b464-a773-45be-b77e-9de416b184b5 service nova] Releasing lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.360309] env[62816]: DEBUG nova.compute.utils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.381380] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.462575] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd33068-b178-42d9-a82a-f6c379797432 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.471943] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f338e0b0-7269-4708-ae26-1fe980b3bf88 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.508776] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fd66580-c3b3-420c-92dd-ff5358b28d13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.518986] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.520337] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc2a456-1e57-458a-8f31-863a783d6527 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.534607] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1525.705192] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.718289] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.864395] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.877668] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.014580] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.037675] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1526.205903] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.218827] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.378688] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.518439] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215145-c7c0-4a88-a9fc-aeb13e6725f0, 'name': SearchDatastore_Task, 'duration_secs': 3.125596} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.519491] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbcc58fb-3e91-4772-b23d-18a8df294a75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.524919] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1526.524919] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529e09a5-e2d2-26d0-5c3b-c58daf8e43e0" [ 1526.524919] env[62816]: _type = "Task" [ 1526.524919] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.532790] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529e09a5-e2d2-26d0-5c3b-c58daf8e43e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.542567] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1526.543017] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.619s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.543086] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.318s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.546026] env[62816]: INFO nova.compute.claims [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1526.547359] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} 
[ 1526.547438] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1526.706154] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788315, 'name': ExtendVirtualDisk_Task, 'duration_secs': 3.733895} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.706449] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1526.707224] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8613edee-3c61-4545-bf1e-8887d564d359 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.732217] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 48b74d52-e764-4d14-b372-fc34872205dd/48b74d52-e764-4d14-b372-fc34872205dd.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.733413] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4d3f7e8-21de-47fe-b29d-0159805ca244 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.751823] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788313, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.199092} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.752546] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ba6e94c9-eb58-4040-8e28-f255961e76ca/ba6e94c9-eb58-4040-8e28-f255961e76ca.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1526.752926] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1526.753087] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44f077c6-6131-42a2-8894-1e42672897b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.757860] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1526.757860] env[62816]: value = "task-1788318" [ 1526.757860] env[62816]: _type = "Task" [ 1526.757860] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.761888] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1526.761888] env[62816]: value = "task-1788319" [ 1526.761888] env[62816]: _type = "Task" [ 1526.761888] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.768463] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788318, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.773607] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788319, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.879653] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.938889] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.939290] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.939540] env[62816]: INFO nova.compute.manager [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Attaching volume ee7bf020-3b58-4597-a084-12bb888e6072 to /dev/sdb [ 1526.976668] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e63d532d-b604-428d-8704-0d8dcc3764d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.986680] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2e6c449-4354-4d8f-b91e-62d4c5890e1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.000840] env[62816]: DEBUG nova.virt.block_device [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating existing volume attachment record: 848c2f5b-12b4-44a5-8c88-e9e516a8b9f9 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1527.041393] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529e09a5-e2d2-26d0-5c3b-c58daf8e43e0, 'name': SearchDatastore_Task, 'duration_secs': 0.009308} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.041779] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.042134] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0dbf907f-0313-435c-a8be-19f7e48ded76/0dbf907f-0313-435c-a8be-19f7e48ded76.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1527.042487] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-68477477-936a-4b1e-919b-f4d73a8aabee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.050165] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1527.050165] env[62816]: value = "task-1788320" [ 1527.050165] env[62816]: _type = "Task" [ 1527.050165] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.060210] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] There are 17 instances to clean {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1527.060486] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 128bd207-a483-4b38-9fd4-4fb996ce1d0d] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1527.068161] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788320, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.281446] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788318, 'name': ReconfigVM_Task, 'duration_secs': 0.464183} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.281446] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060654} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.281446] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 48b74d52-e764-4d14-b372-fc34872205dd/48b74d52-e764-4d14-b372-fc34872205dd.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1527.281596] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1527.281927] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c0604c4-52ca-4487-b5b6-fef9ebe6b8fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.285897] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5691b69c-ff5a-4588-9d82-7c2e79f69383 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.316530] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] ba6e94c9-eb58-4040-8e28-f255961e76ca/ba6e94c9-eb58-4040-8e28-f255961e76ca.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1527.320024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d827f34d-8cb3-435e-aac1-f93623931442 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.334319] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1527.334319] env[62816]: value = "task-1788323" [ 1527.334319] env[62816]: _type = "Task" [ 1527.334319] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.339999] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1527.339999] env[62816]: value = "task-1788325" [ 1527.339999] env[62816]: _type = "Task" [ 1527.339999] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.342114] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528bd63c-0516-d00a-6b8c-b7bf255dd0d7/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1527.343417] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a402ed5-933d-44e9-9e79-639f8f3986ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.352097] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788323, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.355677] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528bd63c-0516-d00a-6b8c-b7bf255dd0d7/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1527.355841] env[62816]: ERROR oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528bd63c-0516-d00a-6b8c-b7bf255dd0d7/disk-0.vmdk due to incomplete transfer. [ 1527.359121] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-11184841-50af-4510-9ef8-d0ab5663edef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.360784] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788325, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.366384] env[62816]: DEBUG oslo_vmware.rw_handles [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/528bd63c-0516-d00a-6b8c-b7bf255dd0d7/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1527.366601] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Uploaded image 60677bd4-81fd-4001-9e6f-2de81a3b0680 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1527.368560] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1527.368853] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e7cc1603-b7ac-4230-96bf-47ba24fa522d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.379020] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.380882] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1527.380882] env[62816]: value = "task-1788326" [ 1527.380882] env[62816]: _type = "Task" [ 1527.380882] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.388754] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788326, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.560491] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788320, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.568849] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: a6b06048-6cdc-497e-8c5d-b6a26d3e7557] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1527.851936] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788323, 'name': Rename_Task, 'duration_secs': 0.176563} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.852777] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1527.852925] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3863413b-c046-4849-bc9b-4e51c9de40a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.857880] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788325, 'name': ReconfigVM_Task, 'duration_secs': 0.387213} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.860435] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Reconfigured VM instance instance-00000029 to attach disk [datastore1] ba6e94c9-eb58-4040-8e28-f255961e76ca/ba6e94c9-eb58-4040-8e28-f255961e76ca.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1527.862247] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60247458-6a62-4c55-a994-ab1fba7e7bc7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.863849] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1527.863849] env[62816]: value = "task-1788327" [ 1527.863849] env[62816]: _type = "Task" [ 1527.863849] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.868391] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1527.868391] env[62816]: value = "task-1788328" [ 1527.868391] env[62816]: _type = "Task" [ 1527.868391] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.874559] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788327, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.891983] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.892199] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788328, 'name': Rename_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.899579] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788326, 'name': Destroy_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.060722] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788320, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.630143} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.061937] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0dbf907f-0313-435c-a8be-19f7e48ded76/0dbf907f-0313-435c-a8be-19f7e48ded76.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1528.062193] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1528.062923] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c356ac6d-369b-4434-a6f9-1d6e98e009f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.065481] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4768b5a0-dbd0-4b23-a9ff-df27b26d1f32 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.069463] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0c5c5c06-0b5e-4e11-84b5-ca76828a0565] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1528.074631] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1528.074631] env[62816]: value = "task-1788329" [ 1528.074631] env[62816]: _type = "Task" [ 1528.074631] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.075946] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d60302-2e35-494e-b964-6420d2eb5350 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.084715] env[62816]: DEBUG nova.compute.manager [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1528.084715] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c669a745-f351-42c9-b8f8-379f1e7d7c67 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.117238] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43c14b4-b0b2-44a5-89b4-643479132c5d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.119824] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.129147] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5f7d17e-4790-4efb-ae64-9da96e849ce5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.145560] env[62816]: DEBUG nova.compute.provider_tree [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1528.311024] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "ee543138-1c43-46c4-a512-1977fa5eb3c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.311257] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.377672] env[62816]: DEBUG oslo_vmware.api [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788327, 'name': PowerOnVM_Task, 'duration_secs': 0.498573} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.384125] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1528.384125] env[62816]: INFO nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Took 14.07 seconds to spawn the instance on the hypervisor. [ 1528.384125] env[62816]: DEBUG nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1528.384125] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788328, 'name': Rename_Task, 'duration_secs': 0.304567} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.384798] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e7ea6ce-bb54-4ddb-8192-385602abe597 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.387336] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1528.390545] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e36f1966-07ce-4710-8be4-899456080915 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.396372] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.406369] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788326, 'name': Destroy_Task, 'duration_secs': 0.530611} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.406369] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1528.406369] env[62816]: value = "task-1788330" [ 1528.406369] env[62816]: _type = "Task" [ 1528.406369] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.406369] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Destroyed the VM [ 1528.406369] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1528.406369] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-de2b202c-25f7-4973-9802-d3a54b075ca0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.415341] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788330, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.416599] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1528.416599] env[62816]: value = "task-1788331" [ 1528.416599] env[62816]: _type = "Task" [ 1528.416599] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.424071] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788331, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.575891] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 914b187f-b05f-49d4-bf61-d536ef61934d] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1528.587356] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065653} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.587629] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1528.588685] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf26c56-b314-413a-aa44-a3030c127798 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.612747] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 0dbf907f-0313-435c-a8be-19f7e48ded76/0dbf907f-0313-435c-a8be-19f7e48ded76.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1528.613813] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-175bc5a0-e57a-4d55-9f63-cceea46b33c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.637102] env[62816]: INFO nova.compute.manager [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] instance snapshotting [ 1528.637102] env[62816]: WARNING nova.compute.manager [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1528.637602] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b336fa-5783-43ac-be3c-36172dd8a550 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.641838] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1528.641838] env[62816]: value = "task-1788332" [ 1528.641838] env[62816]: _type = "Task" [ 1528.641838] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.669143] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a667319d-4000-444b-8960-78c61ccf86d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.675721] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788332, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.684963] env[62816]: ERROR nova.scheduler.client.report [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [req-890eaa67-4c95-43f6-9735-10eba99ddf23] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-890eaa67-4c95-43f6-9735-10eba99ddf23"}]} [ 1528.702528] env[62816]: DEBUG nova.scheduler.client.report [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1528.716188] env[62816]: DEBUG nova.scheduler.client.report [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1528.716507] env[62816]: DEBUG nova.compute.provider_tree [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1528.732353] env[62816]: DEBUG nova.scheduler.client.report [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1528.750739] env[62816]: DEBUG nova.scheduler.client.report [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] 
Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1528.888882] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.926164] env[62816]: INFO nova.compute.manager [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Took 42.41 seconds to build instance. [ 1528.932161] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788330, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.937401] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788331, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.079463] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4a6ac464-a5e0-4ed6-909d-f1730be14380] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1529.153762] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788332, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.185995] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1529.186515] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e7f64837-3dc9-4250-ae8e-61bad00a7716 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.195281] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1529.195281] env[62816]: value = "task-1788333" [ 1529.195281] env[62816]: _type = "Task" [ 1529.195281] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.205701] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788333, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.259089] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc7654e-d47e-4037-9f31-9eec20661213 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.266602] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ed558d-03a0-4fb6-93e7-aeae37f64c59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.303845] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ab7bb2-430e-4894-bd64-d82f34d6ed9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.318141] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587f0a19-0070-4d8a-a42f-74a5633b528d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.334541] env[62816]: DEBUG nova.compute.provider_tree [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1529.387455] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.421663] env[62816]: DEBUG oslo_vmware.api [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788330, 'name': PowerOnVM_Task, 'duration_secs': 0.889621} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.421985] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1529.422211] env[62816]: INFO nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Took 12.40 seconds to spawn the instance on the hypervisor. [ 1529.422395] env[62816]: DEBUG nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1529.423408] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65091573-b245-4d95-a1d0-547a7a58327e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.436832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee9c5e5d-3fff-407c-84a1-758cc7fea20c tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "48b74d52-e764-4d14-b372-fc34872205dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.224s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.437104] env[62816]: DEBUG oslo_vmware.api [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788331, 'name': RemoveSnapshot_Task, 'duration_secs': 0.937253} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.438414] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1529.438641] env[62816]: INFO nova.compute.manager [None req-fe553a4c-4561-4939-b841-4fa142aaf5ae tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Took 16.86 seconds to snapshot the instance on the hypervisor. 
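
The repeated "Task: {'id': task-..., 'name': ...} progress is N%" entries and the closing "completed successfully" records above come from a poll-until-done loop around vCenter task objects (wait_for_task / _poll_task). The following is an illustrative sketch of that polling pattern only, not the oslo.vmware implementation; fetch_task_state, the interval, and the demo task id are assumptions made up for the example.

import time

# Illustrative only: a generic poll loop in the style of the
# "Task: {...} progress is N%." entries above. fetch_task_state is a
# stand-in callable returning (state, progress) for a task id; it is
# not a real oslo.vmware call.
def wait_for_task(task_id, fetch_task_state, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out; return its duration."""
    start = time.monotonic()
    while True:
        state, progress = fetch_task_state(task_id)   # e.g. ('running', 66)
        if state == 'success':
            return time.monotonic() - start           # the duration_secs seen in the log
        if state == 'error':
            raise RuntimeError(f'{task_id} failed')
        if time.monotonic() - start > timeout:
            raise TimeoutError(f'{task_id} did not finish within {timeout}s')
        print(f"Task: {{'id': {task_id!r}}} progress is {progress}%.")
        time.sleep(interval)

# Example: a fake task that completes on the third poll.
states = iter([('running', 0), ('running', 66), ('success', 100)])
wait_for_task('task-demo', lambda _id: next(states), interval=0)

The intermediate 0% / 66% / 94% values in the log are simply whatever progress the task reported at each poll; only the final poll that observes the terminal state yields the logged duration_secs.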
[ 1529.583102] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 52670f9e-0cb7-4464-be9c-7b0d8346f60f] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1529.652418] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788332, 'name': ReconfigVM_Task, 'duration_secs': 0.547689} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.652699] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 0dbf907f-0313-435c-a8be-19f7e48ded76/0dbf907f-0313-435c-a8be-19f7e48ded76.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1529.653339] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54b3fd01-3d05-4aed-ad79-9131890b6791 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.664224] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1529.664224] env[62816]: value = "task-1788335" [ 1529.664224] env[62816]: _type = "Task" [ 1529.664224] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.671839] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788335, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.703726] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788333, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.812774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.813101] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.813357] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1529.813509] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.813683] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.815961] env[62816]: INFO nova.compute.manager [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Terminating instance [ 1529.817708] env[62816]: DEBUG nova.compute.manager [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1529.817910] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1529.818725] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c000986f-75eb-4755-9e8d-9d645e8ec3b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.826643] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1529.826977] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3b1064d-81b3-40fc-b46d-b0ef8431b9e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.833470] env[62816]: DEBUG oslo_vmware.api [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1529.833470] env[62816]: value = "task-1788336" [ 1529.833470] env[62816]: _type = "Task" [ 1529.833470] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.846705] env[62816]: DEBUG oslo_vmware.api [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788336, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.875400] env[62816]: DEBUG nova.scheduler.client.report [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1529.877394] env[62816]: DEBUG nova.compute.provider_tree [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 70 to 71 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1529.877394] env[62816]: DEBUG nova.compute.provider_tree [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1529.889904] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.943075] env[62816]: DEBUG nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1529.950156] env[62816]: INFO nova.compute.manager [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Took 39.14 seconds to build instance. 
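
The "Failed to update inventory ... Got 409 ... placement.concurrent_update" error above, followed by "Refreshing inventories", "Updated inventory ... with generation 70", and "Updating resource provider ... generation from 70 to 71", shows Placement's generation-guarded update protocol: every inventory PUT carries the provider generation the client last read, and a 409 means another writer bumped it first, so the client refreshes and retries. Below is a minimal sketch of that optimistic-concurrency loop under assumed names; `placement` is a hypothetical client object, and the real logic in nova/scheduler/client/report.py is considerably more involved.

# Illustrative only: optimistic-concurrency retry against a
# generation-guarded resource, in the style of the placement inventory
# update above. `placement` is a hypothetical helper exposing:
#   get_inventories(rp_uuid) -> (generation, inventories)
#   put_inventories(rp_uuid, generation, inventories), raising Conflict on HTTP 409
class Conflict(Exception):
    """Server rejected the write because the provider generation changed."""

def set_inventory(placement, rp_uuid, new_inventories, max_retries=4):
    for _ in range(max_retries):
        generation, _current = placement.get_inventories(rp_uuid)
        try:
            # The PUT carries the generation we last saw; the server
            # refuses it with 409 if another writer updated the provider first.
            placement.put_inventories(rp_uuid, generation, new_inventories)
            return
        except Conflict:
            # Someone else won the race (generation 70 -> 71 in the log);
            # re-read and try again with the newer generation.
            continue
    raise RuntimeError(f'gave up updating inventory for {rp_uuid}')

Concurrent tempest workers updating the same resource provider (27f49c85-1bb9-4d17-a914-e2f45a5e84fa) make these 409s routine; the retry after refreshing associations is the expected recovery path rather than a failure.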
[ 1530.086780] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f1914aaa-1f3d-48b7-a6d2-ceea16dc786a] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1530.173675] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788335, 'name': Rename_Task, 'duration_secs': 0.223621} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.174141] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1530.174564] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8666969c-bba1-4148-b9e7-6a2b9d7447ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.181799] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1530.181799] env[62816]: value = "task-1788337" [ 1530.181799] env[62816]: _type = "Task" [ 1530.181799] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.191299] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788337, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.203917] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788333, 'name': CreateSnapshot_Task, 'duration_secs': 0.998949} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.204268] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1530.204931] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbd41b7-8469-444c-9baf-8bdf2eca34a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.343625] env[62816]: DEBUG oslo_vmware.api [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788336, 'name': PowerOffVM_Task, 'duration_secs': 0.319099} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.344070] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1530.344343] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1530.344708] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-be75cc04-f223-4329-827e-d9add9144c37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.384732] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.842s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.385328] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1530.388287] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.397s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.388482] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.390473] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.483s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.390731] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.392422] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.863s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.393845] env[62816]: INFO nova.compute.claims [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1530.402780] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.418749] env[62816]: INFO nova.scheduler.client.report [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Deleted allocations for instance f6ddaab3-d420-4ee4-bf75-486228826635 [ 1530.422184] env[62816]: INFO nova.scheduler.client.report [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Deleted allocations for instance c6dc008c-6336-4271-9635-a7e0652138e0 [ 1530.452099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9981b728-8b32-477d-bae4-b09d15d954d7 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.171s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.465967] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.591035] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 2bc7f973-007d-44bd-aae8-d3b62506efba] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1530.692363] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788337, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.722838] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1530.723440] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2fff826a-73de-41fc-b494-1f22157ed2f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.732753] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1530.732753] env[62816]: value = "task-1788339" [ 1530.732753] env[62816]: _type = "Task" [ 1530.732753] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.742218] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788339, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.891159] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.899542] env[62816]: DEBUG nova.compute.utils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1530.903138] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1530.903341] env[62816]: DEBUG nova.network.neutron [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1530.934420] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1de5459d-bcf8-4000-85cb-38db83189de8 tempest-ServerDiagnosticsTest-1146347016 tempest-ServerDiagnosticsTest-1146347016-project-member] Lock "c6dc008c-6336-4271-9635-a7e0652138e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.138s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.935730] env[62816]: DEBUG oslo_concurrency.lockutils [None req-de0e6999-2c85-4d99-a564-a13ec9d0a0f9 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "f6ddaab3-d420-4ee4-bf75-486228826635" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.849s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.957470] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1530.963047] env[62816]: DEBUG nova.policy [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f062fc536a1c4bbeabcb41197b1bc4fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f981032701b04b14841045ed05cbe9a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1530.985162] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "31ac8296-14fa-46f7-b825-c31904b832d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.985414] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "31ac8296-14fa-46f7-b825-c31904b832d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.010292] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1531.010574] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1531.010782] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Deleting the datastore file [datastore1] 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1531.011118] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc45aaf2-51b4-45c9-9887-2aab37778706 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.019730] env[62816]: DEBUG oslo_vmware.api [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for the task: (returnval){ [ 1531.019730] env[62816]: value = "task-1788340" [ 1531.019730] env[62816]: _type = "Task" [ 1531.019730] env[62816]: } 
to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.028924] env[62816]: DEBUG oslo_vmware.api [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788340, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.095481] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 455052cc-292a-414c-8c83-bc512c49a197] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1531.193077] env[62816]: DEBUG oslo_vmware.api [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788337, 'name': PowerOnVM_Task, 'duration_secs': 0.676969} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.193433] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1531.193724] env[62816]: INFO nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Took 11.48 seconds to spawn the instance on the hypervisor. [ 1531.193974] env[62816]: DEBUG nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1531.194834] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41533a27-3e59-48ff-bef6-b53795613e46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.244353] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788339, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.280309] env[62816]: DEBUG nova.network.neutron [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Successfully created port: 5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1531.391943] env[62816]: DEBUG oslo_vmware.api [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788317, 'name': ReconfigVM_Task, 'duration_secs': 8.326247} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.392304] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.392547] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Reconfigured VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1531.404182] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1531.480858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.532445] env[62816]: DEBUG oslo_vmware.api [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Task: {'id': task-1788340, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206543} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.532709] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1531.532906] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1531.533111] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1531.533285] env[62816]: INFO nova.compute.manager [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1531.533533] env[62816]: DEBUG oslo.service.loopingcall [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.533723] env[62816]: DEBUG nova.compute.manager [-] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1531.533981] env[62816]: DEBUG nova.network.neutron [-] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1531.554698] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1531.555077] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371031', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'name': 'volume-ee7bf020-3b58-4597-a084-12bb888e6072', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c3392d3-cfb0-47c6-9366-8c363ad21297', 'attached_at': '', 'detached_at': '', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'serial': 'ee7bf020-3b58-4597-a084-12bb888e6072'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1531.555975] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667e8c48-695a-4d72-8760-83416019ad08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.581552] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f95910-4bf2-4924-94cb-562f2fdde679 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.601252] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: de33d02f-7e34-4619-a2ed-cda6c54aa030] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1531.611315] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] volume-ee7bf020-3b58-4597-a084-12bb888e6072/volume-ee7bf020-3b58-4597-a084-12bb888e6072.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1531.614227] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2b09a708-aba4-4e85-923e-1b67e4d86b92 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.634900] env[62816]: DEBUG oslo_vmware.api [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1531.634900] env[62816]: value = "task-1788341" [ 1531.634900] env[62816]: _type = "Task" [ 1531.634900] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.647107] env[62816]: DEBUG oslo_vmware.api [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788341, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.715748] env[62816]: INFO nova.compute.manager [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Took 39.22 seconds to build instance. [ 1531.745751] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788339, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.897535] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f250ddbe-dbd5-4579-9dda-ccbac7d0a06e tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-6767c231-2dcb-4d19-ae7c-5b026d48ed26-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 14.340s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.917598] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f3991a-4961-4d4c-b84f-3fe0797df8b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.927087] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d82b523-936b-4240-a959-fde9ec5e3ae7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.966736] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ad54b2-cc8a-42d4-93b8-b7386f83fe66 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.976523] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aca7a60-442d-4a33-a267-f18d3f1bef8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.993468] env[62816]: DEBUG nova.compute.provider_tree [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1532.104414] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 66745316-2735-4c49-b1a2-f9e547211761] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1532.145439] env[62816]: DEBUG oslo_vmware.api [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: 
{'id': task-1788341, 'name': ReconfigVM_Task, 'duration_secs': 0.390418} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.145748] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Reconfigured VM instance instance-0000001d to attach disk [datastore1] volume-ee7bf020-3b58-4597-a084-12bb888e6072/volume-ee7bf020-3b58-4597-a084-12bb888e6072.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1532.150918] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18f64951-b33f-4c36-9bcb-f3f5bc4c51e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.167429] env[62816]: DEBUG oslo_vmware.api [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1532.167429] env[62816]: value = "task-1788342" [ 1532.167429] env[62816]: _type = "Task" [ 1532.167429] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.176396] env[62816]: DEBUG oslo_vmware.api [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788342, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.217226] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bdef4f56-0385-465f-8145-e7af27cf6f92 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.093s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.244555] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788339, 'name': CloneVM_Task} progress is 95%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.423114] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1532.443689] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0af33be3-1673-42f1-a298-c50b616c7610',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2008575729',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1532.443970] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1532.444153] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1532.444455] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1532.444520] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1532.444625] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1532.444857] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1532.445470] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1532.445470] env[62816]: DEBUG 
nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1532.445470] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1532.445638] env[62816]: DEBUG nova.virt.hardware [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1532.446613] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5893b5-1a2d-4413-8808-a3158632a27d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.455119] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5cab825-aa19-4a48-ac15-a67ddd760329 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.522512] env[62816]: ERROR nova.scheduler.client.report [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [req-fd0a45fe-a2fa-41ab-a35c-be693406935f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-fd0a45fe-a2fa-41ab-a35c-be693406935f"}]} [ 1532.524993] env[62816]: DEBUG nova.network.neutron [-] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1532.540036] env[62816]: DEBUG nova.scheduler.client.report [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1532.565772] env[62816]: DEBUG nova.scheduler.client.report [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1532.567063] env[62816]: DEBUG nova.compute.provider_tree [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1532.579159] env[62816]: DEBUG nova.scheduler.client.report [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1532.603729] env[62816]: DEBUG nova.scheduler.client.report [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1532.609938] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 
927badc2-decf-49af-b2c0-d95b471272c9] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1532.677504] env[62816]: DEBUG oslo_vmware.api [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788342, 'name': ReconfigVM_Task, 'duration_secs': 0.21666} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.682017] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371031', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'name': 'volume-ee7bf020-3b58-4597-a084-12bb888e6072', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c3392d3-cfb0-47c6-9366-8c363ad21297', 'attached_at': '', 'detached_at': '', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'serial': 'ee7bf020-3b58-4597-a084-12bb888e6072'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1532.720076] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1532.746727] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788339, 'name': CloneVM_Task, 'duration_secs': 1.692194} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.747020] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Created linked-clone VM from snapshot [ 1532.747782] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4239fee8-f8d4-4e80-ba13-5f6e81e05db3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.760409] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Uploading image f159b3d5-89f6-46c6-809c-8a238179a62d {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1532.789020] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1532.789020] env[62816]: value = "vm-371033" [ 1532.789020] env[62816]: _type = "VirtualMachine" [ 1532.789020] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1532.789321] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2a73a70f-421a-4662-8a47-49bf15fadcf0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.799732] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease: (returnval){ [ 1532.799732] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c02bb7-4ca1-678d-6f9a-2d10a649896e" [ 1532.799732] env[62816]: _type = "HttpNfcLease" [ 1532.799732] env[62816]: } obtained for exporting VM: (result){ [ 1532.799732] env[62816]: value = "vm-371033" [ 1532.799732] env[62816]: _type = "VirtualMachine" [ 1532.799732] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1532.800153] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the lease: (returnval){ [ 1532.800153] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c02bb7-4ca1-678d-6f9a-2d10a649896e" [ 1532.800153] env[62816]: _type = "HttpNfcLease" [ 1532.800153] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1532.812915] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1532.812915] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c02bb7-4ca1-678d-6f9a-2d10a649896e" [ 1532.812915] env[62816]: _type = "HttpNfcLease" [ 1532.812915] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1533.002274] env[62816]: DEBUG nova.compute.manager [req-1adde543-d571-4aa8-9137-cc8878e6a5fd req-6066a13c-89f0-4ab0-8113-5f36705627c4 service nova] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Received event network-vif-deleted-89c39d07-acd3-4f92-a168-921d07739ac6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.004579] env[62816]: DEBUG nova.network.neutron [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Successfully updated port: 5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1533.029036] env[62816]: INFO nova.compute.manager [-] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Took 1.49 seconds to deallocate network for instance. 
[ 1533.049910] env[62816]: DEBUG nova.compute.manager [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1533.051065] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a00f023-dc5b-4ea3-bfdf-b4c239db46ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.116875] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 7be4c8f8-240c-4a71-93bb-aeb94243d781] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1533.175674] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589cbf62-3111-4643-b1dd-99841df3e342 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.183941] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebad218c-7e69-4673-944b-951fc7172f94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.231881] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d17646-50f5-48f3-8670-8c02e43c1ec7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.243504] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edea657c-3212-4b6c-bd0d-d2722533a7a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.259574] env[62816]: DEBUG nova.compute.provider_tree [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.262717] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.310175] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1533.310175] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c02bb7-4ca1-678d-6f9a-2d10a649896e" [ 1533.310175] env[62816]: _type = "HttpNfcLease" [ 1533.310175] env[62816]: } is ready. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1533.310175] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1533.310175] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c02bb7-4ca1-678d-6f9a-2d10a649896e" [ 1533.310175] env[62816]: _type = "HttpNfcLease" [ 1533.310175] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1533.310460] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5314e5-6ad4-4566-90b2-b5ddd4b5a75f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.317952] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525562e8-59ce-bbd5-8588-86fbecbba4bf/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1533.318227] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525562e8-59ce-bbd5-8588-86fbecbba4bf/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1533.382611] env[62816]: DEBUG nova.compute.manager [req-aa8932ca-cc4c-419b-acf8-0e53806742ff req-b9b4f6cd-8ea4-4d81-9bb4-b59364f6766a service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Received event network-vif-plugged-5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.382844] env[62816]: DEBUG oslo_concurrency.lockutils [req-aa8932ca-cc4c-419b-acf8-0e53806742ff req-b9b4f6cd-8ea4-4d81-9bb4-b59364f6766a service nova] Acquiring lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.383051] env[62816]: DEBUG oslo_concurrency.lockutils [req-aa8932ca-cc4c-419b-acf8-0e53806742ff req-b9b4f6cd-8ea4-4d81-9bb4-b59364f6766a service nova] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1533.383222] env[62816]: DEBUG oslo_concurrency.lockutils [req-aa8932ca-cc4c-419b-acf8-0e53806742ff req-b9b4f6cd-8ea4-4d81-9bb4-b59364f6766a service nova] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1533.383385] env[62816]: DEBUG nova.compute.manager [req-aa8932ca-cc4c-419b-acf8-0e53806742ff req-b9b4f6cd-8ea4-4d81-9bb4-b59364f6766a service nova] [instance: 
9bda24c6-f950-47ff-ad3c-ff745291870c] No waiting events found dispatching network-vif-plugged-5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1533.383546] env[62816]: WARNING nova.compute.manager [req-aa8932ca-cc4c-419b-acf8-0e53806742ff req-b9b4f6cd-8ea4-4d81-9bb4-b59364f6766a service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Received unexpected event network-vif-plugged-5b2b9d44-f66e-428f-a75c-6e213ebdb364 for instance with vm_state building and task_state spawning. [ 1533.430620] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7f102019-b87d-4fd5-913d-0ef7b670fba1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.482042] env[62816]: DEBUG nova.compute.manager [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1533.483121] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c566afe-eb25-4c0e-802c-b36566857bd7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.509758] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.509949] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.510135] env[62816]: DEBUG nova.network.neutron [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1533.533863] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.564204] env[62816]: INFO nova.compute.manager [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] instance snapshotting [ 1533.567114] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797b0a2d-c48a-40b8-a7ad-f57a267f1977 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.589706] 
env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94499c74-d929-4a25-a53d-4bbfcc263d59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.621604] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: ce527ce8-07b6-47a6-bab9-7934a3dda9b3] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1533.740625] env[62816]: DEBUG nova.objects.instance [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1533.810525] env[62816]: DEBUG nova.scheduler.client.report [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 72 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1533.813449] env[62816]: DEBUG nova.compute.provider_tree [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 72 to 73 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1533.813449] env[62816]: DEBUG nova.compute.provider_tree [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1533.994786] env[62816]: INFO nova.compute.manager [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] instance snapshotting [ 1533.997515] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a34b8ae-2dc0-4fa2-b5be-59a60ce1b8e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.022154] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-9ef83914-4366-42bf-b96c-8dc9590c7cab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.083564] env[62816]: DEBUG nova.network.neutron [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1534.104520] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1534.105585] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0f80afd7-4735-40f8-8cf9-bd4f9012599a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.118570] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1534.118570] env[62816]: value = "task-1788344" [ 1534.118570] env[62816]: _type = "Task" [ 1534.118570] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.130267] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f06102d6-be5c-40d1-ae1d-8ae8190fd0d7] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1534.136019] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788344, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.248550] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4a425cb4-48a2-4385-83f8-a397ad0946c7 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.309s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.318107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.925s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.318627] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1534.321249] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.123s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.323219] env[62816]: INFO nova.compute.claims [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1534.394017] env[62816]: DEBUG nova.network.neutron [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.466896] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-6767c231-2dcb-4d19-ae7c-5b026d48ed26-ae939699-528f-4716-8d38-8dc982cef0b3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.467198] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-6767c231-2dcb-4d19-ae7c-5b026d48ed26-ae939699-528f-4716-8d38-8dc982cef0b3" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.528913] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] 
Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1534.529238] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.529461] env[62816]: DEBUG nova.compute.manager [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1534.530559] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cf3c21-fa17-42be-8409-fc1a9a1852d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.540673] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1534.543634] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dd4d20c2-75de-4871-a3c5-ec9189c7552e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.546247] env[62816]: DEBUG nova.compute.manager [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1534.546891] env[62816]: DEBUG nova.objects.instance [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1534.558462] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1534.558462] env[62816]: value = "task-1788345" [ 1534.558462] env[62816]: _type = "Task" [ 1534.558462] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.565103] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788345, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.628048] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788344, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.636141] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 666d5105-ee2e-4691-b13c-bd7feb045959] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1534.832747] env[62816]: DEBUG nova.compute.utils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1534.837727] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1534.837996] env[62816]: DEBUG nova.network.neutron [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1534.896850] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.897258] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Instance network_info: |[{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1534.897761] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:42:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b2b9d44-f66e-428f-a75c-6e213ebdb364', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1534.908200] env[62816]: DEBUG oslo.service.loopingcall [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1534.908200] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1534.908200] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56302c5b-6a85-415c-89b9-dcf415367875 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.931041] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1534.931041] env[62816]: value = "task-1788346" [ 1534.931041] env[62816]: _type = "Task" [ 1534.931041] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.940018] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788346, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.941785] env[62816]: DEBUG nova.policy [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cbf5bee5983340fb8cd13001782081f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10abe50d596248d8835e04e95e122227', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1534.970565] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.970959] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.971922] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eac00e7-623e-4b4d-88d0-328b1ecaaed0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.991142] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1645da91-d8dc-452b-b1d5-a913c22893a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.027418] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Reconfiguring VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1535.027589] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41734b2a-54ad-4818-ae23-873cb4083261 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.047949] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1535.047949] env[62816]: value = "task-1788347" [ 1535.047949] env[62816]: _type = "Task" [ 1535.047949] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.059253] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.059815] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1535.064588] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bb48de80-110f-4028-a915-5aab6d31c4dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.072023] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788345, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.074894] env[62816]: DEBUG oslo_vmware.api [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1535.074894] env[62816]: value = "task-1788348" [ 1535.074894] env[62816]: _type = "Task" [ 1535.074894] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.092023] env[62816]: DEBUG oslo_vmware.api [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788348, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.132639] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788344, 'name': CreateSnapshot_Task, 'duration_secs': 0.825007} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.132639] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1535.133302] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba336440-d9b3-4446-9413-556d4efc9644 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.143214] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 99bd7579-7097-41df-a8c0-e12a3863a3dc] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1535.338996] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1535.440534] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788346, 'name': CreateVM_Task, 'duration_secs': 0.397402} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.441393] env[62816]: DEBUG nova.network.neutron [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Successfully created port: 7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1535.443194] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1535.446885] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.447067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.447389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1535.448095] env[62816]: DEBUG 
oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59fe4e89-80f8-4c16-8011-e95ab09fa7bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.453027] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1535.453027] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c42f01-abbf-9824-e5b6-157a23d41f6a" [ 1535.453027] env[62816]: _type = "Task" [ 1535.453027] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.463012] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c42f01-abbf-9824-e5b6-157a23d41f6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.479084] env[62816]: DEBUG nova.compute.manager [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Received event network-changed-5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1535.479284] env[62816]: DEBUG nova.compute.manager [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Refreshing instance network info cache due to event network-changed-5b2b9d44-f66e-428f-a75c-6e213ebdb364. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1535.479626] env[62816]: DEBUG oslo_concurrency.lockutils [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] Acquiring lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.479626] env[62816]: DEBUG oslo_concurrency.lockutils [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] Acquired lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.479879] env[62816]: DEBUG nova.network.neutron [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Refreshing network info cache for port 5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1535.558142] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.574981] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788345, 'name': CreateSnapshot_Task, 'duration_secs': 0.769833} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.575384] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1535.579110] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008bd63e-4873-4a63-a0b6-6a0053d1634f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.594025] env[62816]: DEBUG oslo_vmware.api [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788348, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.656634] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1535.659852] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1535.660293] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances with incomplete migration {{(pid=62816) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1535.662421] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-17dd3045-3fd7-4cdc-a6fc-f5387cabfd17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.676024] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1535.676024] env[62816]: value = "task-1788349" [ 1535.676024] env[62816]: _type = "Task" [ 1535.676024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.685915] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788349, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.765319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "48b74d52-e764-4d14-b372-fc34872205dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.765642] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "48b74d52-e764-4d14-b372-fc34872205dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.765876] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "48b74d52-e764-4d14-b372-fc34872205dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.766094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "48b74d52-e764-4d14-b372-fc34872205dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1535.766275] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "48b74d52-e764-4d14-b372-fc34872205dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.772559] env[62816]: INFO nova.compute.manager [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Terminating instance [ 1535.777354] env[62816]: DEBUG nova.compute.manager [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1535.777570] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1535.778451] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc83da2-0fed-4034-aa15-5b92f9ae60f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.791255] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1535.791255] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-530d83ed-a47a-4dea-a48d-4fe6b4f1de63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.796708] env[62816]: DEBUG oslo_vmware.api [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1535.796708] env[62816]: value = "task-1788350" [ 1535.796708] env[62816]: _type = "Task" [ 1535.796708] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.815295] env[62816]: DEBUG oslo_vmware.api [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.926349] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a814750-01aa-4fa9-b578-a528018a96cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.937019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0cd0f0-7b36-42b6-a1c8-19c8928079b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.978743] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b38bd1c-bb65-4a3b-8ac5-cacdd62767c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.993418] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c42f01-abbf-9824-e5b6-157a23d41f6a, 'name': SearchDatastore_Task, 'duration_secs': 0.00969} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.996438] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.996612] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1535.997066] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.997263] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.997524] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.998150] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4eafa6cd-b14f-4af1-be0a-29525a6f6669 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.001095] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd652b6-6603-4769-a7d3-802c330de258 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.019075] env[62816]: DEBUG nova.compute.provider_tree [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.021841] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1536.022095] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1536.023107] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96df405a-dc79-44ff-ae03-6c88536dccbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.028619] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1536.028619] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ceac32-716b-b9d3-9813-42114634ed19" [ 1536.028619] env[62816]: _type = "Task" [ 1536.028619] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.037254] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ceac32-716b-b9d3-9813-42114634ed19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.058859] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.086815] env[62816]: DEBUG oslo_vmware.api [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788348, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.110169] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1536.110546] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d3eb4280-9910-44ee-8499-b2977ba89101 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.120656] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1536.120656] env[62816]: value = "task-1788351" [ 1536.120656] env[62816]: _type = "Task" [ 1536.120656] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.130550] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788351, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.189104] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788349, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.321357] env[62816]: DEBUG oslo_vmware.api [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788350, 'name': PowerOffVM_Task, 'duration_secs': 0.433659} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.321812] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1536.322084] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1536.322613] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef086063-897c-4fa4-9183-1065483f0ec6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.350801] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1536.376030] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1536.376330] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1536.376490] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1536.376672] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1536.376818] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1536.376985] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1536.377310] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1536.377354] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1536.377517] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1536.377707] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1536.377886] env[62816]: DEBUG nova.virt.hardware [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1536.378890] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3ec529-0fc6-41e4-950d-34e49f5fb04c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.388189] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ae7b43-f0b7-4589-a340-0795aaf663b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.405323] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1536.405664] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1536.405947] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Deleting the datastore file [datastore1] 48b74d52-e764-4d14-b372-fc34872205dd {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1536.406328] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72bb4129-6061-4bc4-a58b-491b3b0171e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.413397] env[62816]: DEBUG oslo_vmware.api [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for the task: (returnval){ [ 1536.413397] env[62816]: value = "task-1788353" [ 1536.413397] env[62816]: _type = "Task" [ 1536.413397] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.425146] env[62816]: DEBUG oslo_vmware.api [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788353, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.431974] env[62816]: DEBUG nova.network.neutron [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updated VIF entry in instance network info cache for port 5b2b9d44-f66e-428f-a75c-6e213ebdb364. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1536.432385] env[62816]: DEBUG nova.network.neutron [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.524576] env[62816]: DEBUG nova.scheduler.client.report [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.542023] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ceac32-716b-b9d3-9813-42114634ed19, 'name': SearchDatastore_Task, 'duration_secs': 0.010702} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.542363] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21f16919-f22c-40e1-9e90-d25888f7d507 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.551807] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1536.551807] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521cd4a6-6f33-6eab-0c05-564898a7b4b0" [ 1536.551807] env[62816]: _type = "Task" [ 1536.551807] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.570407] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521cd4a6-6f33-6eab-0c05-564898a7b4b0, 'name': SearchDatastore_Task, 'duration_secs': 0.012539} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.571030] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.571030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.571521] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1536.573024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a7b38b6-3af7-4757-9d6c-1a85141f3e98 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.581799] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1536.581799] env[62816]: value = "task-1788354" [ 1536.581799] env[62816]: _type = "Task" [ 1536.581799] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.594521] env[62816]: DEBUG oslo_vmware.api [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788348, 'name': PowerOffVM_Task, 'duration_secs': 1.24243} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.595378] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1536.595584] env[62816]: DEBUG nova.compute.manager [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1536.596592] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a597527-a882-4411-a092-f5c233ec6d30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.604941] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.634378] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788351, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.687885] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788349, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.924015] env[62816]: DEBUG oslo_vmware.api [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Task: {'id': task-1788353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220773} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.924321] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1536.924522] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1536.924798] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1536.925098] env[62816]: INFO nova.compute.manager [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1536.927791] env[62816]: DEBUG oslo.service.loopingcall [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1536.927791] env[62816]: DEBUG nova.compute.manager [-] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1536.927791] env[62816]: DEBUG nova.network.neutron [-] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1536.935692] env[62816]: DEBUG oslo_concurrency.lockutils [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] Releasing lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.936054] env[62816]: DEBUG nova.compute.manager [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received event network-changed-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1536.936305] env[62816]: DEBUG nova.compute.manager [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing instance network info cache due to event network-changed-fe984819-7451-4e21-be74-349cfccd5318. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1536.936565] env[62816]: DEBUG oslo_concurrency.lockutils [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1536.936716] env[62816]: DEBUG oslo_concurrency.lockutils [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1536.937447] env[62816]: DEBUG nova.network.neutron [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing network info cache for port fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.034687] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.035514] env[62816]: DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1537.041351] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.425s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.041640] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.051086] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.542s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.051469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.053959] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.193s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.054215] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.058074] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.079s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.062827] env[62816]: INFO nova.compute.claims [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1537.084947] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c 
tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.098530] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788354, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.105526] env[62816]: INFO nova.scheduler.client.report [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleted allocations for instance cf6ff174-1324-42bd-a77a-905b9a333c27 [ 1537.118085] env[62816]: INFO nova.scheduler.client.report [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Deleted allocations for instance fb84cb48-d1a1-4eec-adb8-8edc585263df [ 1537.119755] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46e59b51-0291-4a08-a0c8-286b237b608d tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.590s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.145758] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788351, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.151029] env[62816]: INFO nova.scheduler.client.report [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted allocations for instance b409568f-6e04-4218-8a7b-1bbf785115c3 [ 1537.190199] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788349, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.446035] env[62816]: DEBUG nova.objects.instance [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.544528] env[62816]: DEBUG nova.compute.utils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1537.546465] env[62816]: DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1537.546663] env[62816]: DEBUG nova.network.neutron [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1537.567366] env[62816]: DEBUG nova.network.neutron [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Successfully updated port: 7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1537.590198] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.603828] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621223} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.603828] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1537.603828] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1537.603828] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7f57e87-ab1d-4a00-856e-edacad3aab6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.616047] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1537.616047] env[62816]: value = "task-1788355" [ 1537.616047] env[62816]: _type = "Task" [ 1537.616047] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.621777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9f8dbb81-e091-44a5-86ad-db0522f1f251 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "cf6ff174-1324-42bd-a77a-905b9a333c27" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.463s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.648757] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788355, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.651268] env[62816]: DEBUG nova.policy [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21ed3abad90741799db9f998a15c7787', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f016ab6a03848ba8014647f483f0b92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1537.654197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3d0efce4-6543-48c5-8c05-c1bc9453c15a tempest-ServersTestFqdnHostnames-850098616 tempest-ServersTestFqdnHostnames-850098616-project-member] Lock "fb84cb48-d1a1-4eec-adb8-8edc585263df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.387s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.666254] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788351, 'name': CloneVM_Task, 'duration_secs': 1.471933} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.666755] env[62816]: DEBUG oslo_concurrency.lockutils [None req-09161d6d-dd01-4207-b085-565b473e8a87 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "b409568f-6e04-4218-8a7b-1bbf785115c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.904s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.667794] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Created linked-clone VM from snapshot [ 1537.668789] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26aabd7b-652f-4df6-ba8f-af2313c7a205 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.679389] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Uploading image ee4d84ac-b57d-4c36-bdf8-25474a241232 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1537.691576] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788349, 'name': CloneVM_Task, 'duration_secs': 1.569538} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.695128] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1537.695128] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Created linked-clone VM from snapshot [ 1537.695128] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e388bad9-250b-4a63-827d-92f43950e761 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.697251] env[62816]: DEBUG nova.compute.manager [req-a92781b0-5518-4bd4-8974-3e73d39a1855 req-3bb124df-e94b-44f3-949c-294bd5f305cf service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Received event network-vif-plugged-7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.697464] env[62816]: DEBUG oslo_concurrency.lockutils [req-a92781b0-5518-4bd4-8974-3e73d39a1855 req-3bb124df-e94b-44f3-949c-294bd5f305cf service nova] Acquiring lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.697654] env[62816]: DEBUG oslo_concurrency.lockutils [req-a92781b0-5518-4bd4-8974-3e73d39a1855 req-3bb124df-e94b-44f3-949c-294bd5f305cf service nova] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.697841] env[62816]: DEBUG oslo_concurrency.lockutils [req-a92781b0-5518-4bd4-8974-3e73d39a1855 req-3bb124df-e94b-44f3-949c-294bd5f305cf service nova] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.698703] env[62816]: DEBUG nova.compute.manager [req-a92781b0-5518-4bd4-8974-3e73d39a1855 req-3bb124df-e94b-44f3-949c-294bd5f305cf service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] No waiting events found dispatching network-vif-plugged-7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1537.698703] env[62816]: WARNING nova.compute.manager [req-a92781b0-5518-4bd4-8974-3e73d39a1855 req-3bb124df-e94b-44f3-949c-294bd5f305cf service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Received unexpected event network-vif-plugged-7ade2505-d2f7-45f5-b360-364fc2c58b96 for instance with vm_state building and task_state spawning. 
[ 1537.701224] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90ca3024-d392-42e9-ae09-ac3bedaf1a55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.711161] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Uploading image 44a8a93c-701f-4fba-8aad-530bd31b3f1b {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1537.717174] env[62816]: DEBUG nova.compute.manager [req-06f7af66-0d99-4123-87b2-f1a41a9eea68 req-440f26b6-7af6-48ef-af89-e461be9859e7 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Received event network-vif-deleted-bc9889f6-a785-436a-a67e-892333ea07e1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.717453] env[62816]: INFO nova.compute.manager [req-06f7af66-0d99-4123-87b2-f1a41a9eea68 req-440f26b6-7af6-48ef-af89-e461be9859e7 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Neutron deleted interface bc9889f6-a785-436a-a67e-892333ea07e1; detaching it from the instance and deleting it from the info cache [ 1537.717649] env[62816]: DEBUG nova.network.neutron [req-06f7af66-0d99-4123-87b2-f1a41a9eea68 req-440f26b6-7af6-48ef-af89-e461be9859e7 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.719167] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1537.719167] env[62816]: value = "task-1788356" [ 1537.719167] env[62816]: _type = "Task" [ 1537.719167] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.729311] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788356, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.745336] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1537.745336] env[62816]: value = "vm-371037" [ 1537.745336] env[62816]: _type = "VirtualMachine" [ 1537.745336] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1537.745527] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-605869b5-3092-47db-9a11-4b803e3e5940 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.752454] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lease: (returnval){ [ 1537.752454] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52670609-e8e0-56f1-a7b0-a4a017296235" [ 1537.752454] env[62816]: _type = "HttpNfcLease" [ 1537.752454] env[62816]: } obtained for exporting VM: (result){ [ 1537.752454] env[62816]: value = "vm-371037" [ 1537.752454] env[62816]: _type = "VirtualMachine" [ 1537.752454] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1537.752760] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the lease: (returnval){ [ 1537.752760] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52670609-e8e0-56f1-a7b0-a4a017296235" [ 1537.752760] env[62816]: _type = "HttpNfcLease" [ 1537.752760] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1537.764599] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1537.764599] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52670609-e8e0-56f1-a7b0-a4a017296235" [ 1537.764599] env[62816]: _type = "HttpNfcLease" [ 1537.764599] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1537.955343] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.955676] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.955864] env[62816]: DEBUG nova.network.neutron [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1537.956066] env[62816]: DEBUG nova.objects.instance [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'info_cache' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.998537] env[62816]: DEBUG nova.network.neutron [-] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.002539] env[62816]: DEBUG nova.network.neutron [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updated VIF entry in instance network info cache for port fe984819-7451-4e21-be74-349cfccd5318. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1538.002604] env[62816]: DEBUG nova.network.neutron [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.035614] env[62816]: WARNING oslo_messaging._drivers.amqpdriver [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1538.050880] env[62816]: DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1538.078223] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "refresh_cache-ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.078223] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquired lock "refresh_cache-ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.078428] env[62816]: DEBUG nova.network.neutron [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1538.090710] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.128886] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788355, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07185} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.129210] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1538.131047] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a628a11e-8e2a-4626-8b0e-4f94dd1a71f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.154729] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1538.157898] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b2bfb9-9db8-4c3e-a665-3b24930a7beb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.178894] env[62816]: DEBUG nova.network.neutron [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Successfully created port: e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1538.183027] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1538.183027] env[62816]: value = "task-1788358" [ 1538.183027] env[62816]: _type = "Task" [ 1538.183027] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.195261] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788358, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.221957] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7853c1f-aada-4eb1-b406-a75384e33b0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.234117] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788356, 'name': Destroy_Task} progress is 33%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.241974] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88f8d42-54dd-4f48-99ba-cc960f2677b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.260320] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1538.260320] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52670609-e8e0-56f1-a7b0-a4a017296235" [ 1538.260320] env[62816]: _type = "HttpNfcLease" [ 1538.260320] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1538.260574] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1538.260574] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52670609-e8e0-56f1-a7b0-a4a017296235" [ 1538.260574] env[62816]: _type = "HttpNfcLease" [ 1538.260574] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1538.261308] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96af3852-f75d-4643-ae25-fb94f2c2cfdc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.279124] env[62816]: DEBUG nova.compute.manager [req-06f7af66-0d99-4123-87b2-f1a41a9eea68 req-440f26b6-7af6-48ef-af89-e461be9859e7 service nova] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Detach interface failed, port_id=bc9889f6-a785-436a-a67e-892333ea07e1, reason: Instance 48b74d52-e764-4d14-b372-fc34872205dd could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1538.287845] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52260871-7841-1e8c-bd23-328e548f72e8/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1538.288057] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52260871-7841-1e8c-bd23-328e548f72e8/disk-0.vmdk for reading. 
{{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1538.381264] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-142bc954-da18-41d7-930a-b5a8baac91f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.463159] env[62816]: DEBUG nova.objects.base [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Object Instance<1c3392d3-cfb0-47c6-9366-8c363ad21297> lazy-loaded attributes: flavor,info_cache {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1538.504937] env[62816]: INFO nova.compute.manager [-] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Took 1.58 seconds to deallocate network for instance. [ 1538.505471] env[62816]: DEBUG oslo_concurrency.lockutils [req-f186d705-7e55-4d9c-a81e-56cffe6278a3 req-73c2044c-ca21-4413-a41d-20b4d842219c service nova] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.537493] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.537999] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.538639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "42093232-a4e5-4cc3-ab1c-a0023a91e102-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1538.538639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1538.538639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.540722] env[62816]: INFO nova.compute.manager [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Terminating instance [ 1538.545862] env[62816]: DEBUG nova.compute.manager [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1538.546067] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1538.547295] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488134cc-6a3b-46a7-a8c8-48042285796f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.565724] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1538.568824] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28c5fa17-9ebf-4d11-a4fe-831f03d6f4fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.580999] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1538.580999] env[62816]: value = "task-1788359" [ 1538.580999] env[62816]: _type = "Task" [ 1538.580999] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.596908] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.604329] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.632262] env[62816]: DEBUG nova.network.neutron [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1538.704886] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788358, 'name': ReconfigVM_Task, 'duration_secs': 0.311567} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.705313] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1538.706055] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf864497-f81e-4eba-a517-8ced0ba7006e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.712934] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1538.712934] env[62816]: value = "task-1788360" [ 1538.712934] env[62816]: _type = "Task" [ 1538.712934] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.727150] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788360, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.739550] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788356, 'name': Destroy_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.747556] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70402d47-ec7c-4419-9822-1619feffcfc8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.755596] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b591750-d98c-45a8-877a-028854306d13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.795193] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc5b424-15ba-456d-abcb-45c8a39b69d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.809649] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4cc9005-cf51-44e6-bc12-e0be8ad9e99f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.823623] env[62816]: DEBUG nova.compute.provider_tree [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.897062] env[62816]: DEBUG nova.network.neutron [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Updating instance_info_cache with network_info: [{"id": "7ade2505-d2f7-45f5-b360-364fc2c58b96", "address": "fa:16:3e:21:42:84", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ade2505-d2", "ovs_interfaceid": "7ade2505-d2f7-45f5-b360-364fc2c58b96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.016582] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.071270] env[62816]: 
DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1539.089559] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.094573] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.229791] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788360, 'name': Rename_Task, 'duration_secs': 0.15022} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.235221] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1539.237020] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbd53237-cbe3-4050-a783-fa4f7c94c245 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.242983] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788356, 'name': Destroy_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.244748] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1539.244748] env[62816]: value = "task-1788361" [ 1539.244748] env[62816]: _type = "Task" [ 1539.244748] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.256953] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788361, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.328571] env[62816]: DEBUG nova.scheduler.client.report [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1539.400657] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Releasing lock "refresh_cache-ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.400657] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Instance network_info: |[{"id": "7ade2505-d2f7-45f5-b360-364fc2c58b96", "address": "fa:16:3e:21:42:84", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ade2505-d2", "ovs_interfaceid": "7ade2505-d2f7-45f5-b360-364fc2c58b96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1539.405523] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:42:84', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ade2505-d2f7-45f5-b360-364fc2c58b96', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1539.413775] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 
tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Creating folder: Project (10abe50d596248d8835e04e95e122227). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.418215] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33e633ff-44e2-47f8-b451-90098dfeea75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.434212] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Created folder: Project (10abe50d596248d8835e04e95e122227) in parent group-v370905. [ 1539.434212] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Creating folder: Instances. Parent ref: group-v371039. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1539.434212] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f791a6ef-098b-46bb-bf35-9eb49d1a8a61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.445213] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Created folder: Instances in parent group-v371039. [ 1539.445213] env[62816]: DEBUG oslo.service.loopingcall [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1539.445213] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1539.445213] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec8cabb2-81a7-4dcb-aa92-9eaf28c8f270 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.467371] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1539.467371] env[62816]: value = "task-1788364" [ 1539.467371] env[62816]: _type = "Task" [ 1539.467371] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.477233] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788364, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.513216] env[62816]: DEBUG nova.network.neutron [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [{"id": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "address": "fa:16:3e:d6:81:d5", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1110b9ce-76", "ovs_interfaceid": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.595917] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.602753] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.740730] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788356, 'name': Destroy_Task, 'duration_secs': 1.88119} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.741455] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Destroyed the VM [ 1539.741709] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1539.742025] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-31352be9-3a6e-410c-9623-abeb2e08bd15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.754786] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1539.754786] env[62816]: value = "task-1788365" [ 1539.754786] env[62816]: _type = "Task" [ 1539.754786] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.762878] env[62816]: DEBUG oslo_vmware.api [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788361, 'name': PowerOnVM_Task, 'duration_secs': 0.48302} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.764070] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1539.764377] env[62816]: INFO nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Took 7.34 seconds to spawn the instance on the hypervisor. [ 1539.764655] env[62816]: DEBUG nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1539.765534] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7660517-4e87-4998-a771-5643449b20f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.771411] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788365, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.834981] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.779s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.835628] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1539.843039] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.226s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.843039] env[62816]: INFO nova.compute.claims [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.983841] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788364, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.017737] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.092404] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.098700] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788359, 'name': PowerOffVM_Task, 'duration_secs': 1.390664} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.101906] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1540.102294] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1540.103206] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea68e2f2-bde5-4f99-822e-0c835a7304a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.205725] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1540.206009] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1540.206361] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleting the datastore file [datastore1] 42093232-a4e5-4cc3-ab1c-a0023a91e102 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1540.206785] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2995f2a4-14b6-4315-8e88-6ba1aeca703f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.213097] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for the task: (returnval){ [ 1540.213097] env[62816]: value = "task-1788367" [ 1540.213097] env[62816]: _type = "Task" [ 1540.213097] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.222349] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788367, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.266436] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788365, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.285589] env[62816]: INFO nova.compute.manager [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Took 43.08 seconds to build instance. [ 1540.356868] env[62816]: DEBUG nova.compute.utils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.358942] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1540.359145] env[62816]: DEBUG nova.network.neutron [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1540.369310] env[62816]: DEBUG nova.network.neutron [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Successfully updated port: e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1540.448798] env[62816]: DEBUG nova.policy [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ffca35ab8614990be3ff2c9697d424f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef0dee852154407fa3201a860c55bf3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1540.480265] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788364, 'name': CreateVM_Task, 'duration_secs': 0.537623} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.480463] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1540.481467] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.481693] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.482050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1540.482301] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-969d2d42-c7ae-4440-a606-80df434c20ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.489358] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1540.489358] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521a42c9-89ba-76c0-9c00-420356670d9e" [ 1540.489358] env[62816]: _type = "Task" [ 1540.489358] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.500399] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521a42c9-89ba-76c0-9c00-420356670d9e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.527105] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1540.528396] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8579303c-996a-47a8-ad8d-a30f4a557fd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.537972] env[62816]: DEBUG oslo_vmware.api [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1540.537972] env[62816]: value = "task-1788368" [ 1540.537972] env[62816]: _type = "Task" [ 1540.537972] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.549811] env[62816]: DEBUG oslo_vmware.api [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788368, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.589292] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.732218] env[62816]: DEBUG oslo_vmware.api [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Task: {'id': task-1788367, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271836} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.732218] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1540.732218] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1540.732218] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1540.732218] env[62816]: INFO nova.compute.manager [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1540.732218] env[62816]: DEBUG oslo.service.loopingcall [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.732218] env[62816]: DEBUG nova.compute.manager [-] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1540.732218] env[62816]: DEBUG nova.network.neutron [-] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1540.767971] env[62816]: DEBUG oslo_vmware.api [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788365, 'name': RemoveSnapshot_Task, 'duration_secs': 0.958247} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.768603] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1540.787673] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3ead3d2-6b52-4dea-b3d3-f720234581bf tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.379s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.864380] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1540.873207] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "refresh_cache-1056fc6e-af1e-4d63-a9ce-9ade4dd73891" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.873207] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "refresh_cache-1056fc6e-af1e-4d63-a9ce-9ade4dd73891" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.873207] env[62816]: DEBUG nova.network.neutron [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1540.874912] env[62816]: DEBUG nova.network.neutron [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Successfully created port: 0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.001501] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521a42c9-89ba-76c0-9c00-420356670d9e, 'name': SearchDatastore_Task, 'duration_secs': 0.012839} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.005158] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.005519] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1541.009224] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.009469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.009698] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1541.010347] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bf8f3b0-73dc-4bf2-8eae-7b41c124f4ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.023483] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1541.023694] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1541.024589] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3c06f08-0976-4519-bb91-c861c0677843 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.033743] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1541.033743] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52123703-55ea-b091-97b4-b30768f728a8" [ 1541.033743] env[62816]: _type = "Task" [ 1541.033743] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.047312] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52123703-55ea-b091-97b4-b30768f728a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.056363] env[62816]: DEBUG oslo_vmware.api [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788368, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.091453] env[62816]: DEBUG oslo_vmware.api [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788347, 'name': ReconfigVM_Task, 'duration_secs': 5.808731} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.095389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.096211] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Reconfigured VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1541.277472] env[62816]: WARNING nova.compute.manager [None req-02125a61-59e2-4a01-8f09-346e9e036c4d tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Image not found during snapshot: nova.exception.ImageNotFound: Image ee4d84ac-b57d-4c36-bdf8-25474a241232 could not be found. [ 1541.422168] env[62816]: DEBUG nova.network.neutron [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1541.454108] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26695d2b-5316-4550-b3b9-2a4c3cb31f40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.467313] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81222cb-29f6-4980-813c-d3ccf947720c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.507148] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908c9f96-698f-4164-b609-659fcd55a930 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.520275] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e315e5-e72f-4310-a678-b6f3a4e675ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.525652] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1541.525979] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1541.526102] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.526302] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1541.526447] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.526590] env[62816]: DEBUG nova.virt.hardware [None 
req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1541.527386] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1541.527386] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1541.527386] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1541.527386] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1541.527703] env[62816]: DEBUG nova.virt.hardware [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1541.532293] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed1475f-9dfb-4042-936e-8fa44f4afd7b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.547393] env[62816]: DEBUG nova.compute.provider_tree [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.553646] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525562e8-59ce-bbd5-8588-86fbecbba4bf/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1541.557472] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9894aa11-c148-4d55-9192-cfa54bc7ce2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.564023] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d607d82-b1dd-4b61-8950-d43170a4af4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.571171] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52123703-55ea-b091-97b4-b30768f728a8, 'name': SearchDatastore_Task, 'duration_secs': 0.018799} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.576651] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a94cf146-662e-453a-bc2c-8dfad3af2fa5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.579324] env[62816]: DEBUG oslo_vmware.api [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788368, 'name': PowerOnVM_Task, 'duration_secs': 0.5379} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.579560] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525562e8-59ce-bbd5-8588-86fbecbba4bf/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1541.579707] env[62816]: ERROR oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525562e8-59ce-bbd5-8588-86fbecbba4bf/disk-0.vmdk due to incomplete transfer. 
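The nova.virt.hardware records a few entries up trace Nova's CPU-topology selection for this 1-vCPU flavor: "Build topologies for 1 vcpu(s) 1:1:1" under limits sockets=cores=threads=65536, yielding the single result VirtCPUTopology(cores=1,sockets=1,threads=1). A minimal sketch of that enumeration, assuming only what the log shows; possible_topologies is a hypothetical stand-in, not Nova's actual _get_possible_cpu_topologies helper:

```python
# Illustrative sketch only: enumerate (sockets, cores, threads) combinations
# whose product equals the vCPU count, bounded by the limits the log reports.
from itertools import product

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    """Yield candidate (sockets, cores, threads) tuples for `vcpus` CPUs."""
    for s, c, t in product(range(1, min(max_sockets, vcpus) + 1),
                           range(1, min(max_cores, vcpus) + 1),
                           range(1, min(max_threads, vcpus) + 1)):
        if s * c * t == vcpus:
            yield (s, c, t)

# With the logged inputs (1 vCPU, limits 65536:65536:65536) only 1:1:1 fits.
print(list(possible_topologies(1, 65536, 65536, 65536)))  # [(1, 1, 1)]
```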
[ 1541.587829] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1541.588075] env[62816]: DEBUG nova.compute.manager [None req-72c97adc-1907-401c-9936-20401eedf971 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1541.588369] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a109c430-dd9e-4b4a-84bc-0e2e0bfd55ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.591552] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e36020e-1614-493c-88c8-bd2c601e97a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.595974] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1541.595974] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52778f42-65ad-b66f-a64a-c8fd448c1c8d" [ 1541.595974] env[62816]: _type = "Task" [ 1541.595974] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.607886] env[62816]: DEBUG oslo_vmware.rw_handles [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525562e8-59ce-bbd5-8588-86fbecbba4bf/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1541.607886] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Uploaded image f159b3d5-89f6-46c6-809c-8a238179a62d to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1541.609941] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1541.613406] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-477b5d69-6c17-41e3-a3a8-39a37d170fc6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.618210] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52778f42-65ad-b66f-a64a-c8fd448c1c8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.622795] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1541.622795] env[62816]: value = "task-1788369" [ 1541.622795] env[62816]: _type = "Task" [ 1541.622795] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.632346] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788369, 'name': Destroy_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.675812] env[62816]: DEBUG nova.network.neutron [-] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.681408] env[62816]: DEBUG nova.network.neutron [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Updating instance_info_cache with network_info: [{"id": "e267cdb4-076e-4451-8184-3f42be642a6e", "address": "fa:16:3e:97:3e:e2", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape267cdb4-07", "ovs_interfaceid": "e267cdb4-076e-4451-8184-3f42be642a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.879406] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1541.904736] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1541.905020] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1541.905188] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.905372] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1541.905517] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.905663] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1541.905972] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1541.906149] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1541.906337] 
env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1541.906481] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1541.906653] env[62816]: DEBUG nova.virt.hardware [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1541.907603] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe4b5d9-5cd6-4dca-9f2b-f9476e38e563 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.916174] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd2c329-3350-44ad-90f9-9302341d9c61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.971745] env[62816]: DEBUG nova.compute.manager [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Received event network-changed-7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.971745] env[62816]: DEBUG nova.compute.manager [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Refreshing instance network info cache due to event network-changed-7ade2505-d2f7-45f5-b360-364fc2c58b96. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1541.971745] env[62816]: DEBUG oslo_concurrency.lockutils [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] Acquiring lock "refresh_cache-ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1541.972506] env[62816]: DEBUG oslo_concurrency.lockutils [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] Acquired lock "refresh_cache-ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1541.972506] env[62816]: DEBUG nova.network.neutron [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Refreshing network info cache for port 7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1542.055487] env[62816]: DEBUG nova.compute.manager [req-9a6449b1-3c88-4733-b19e-525be5247a92 req-fce3e6ef-1cc9-4768-b0d5-a46ca77ddcd0 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Received event network-vif-plugged-e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1542.055844] env[62816]: DEBUG oslo_concurrency.lockutils [req-9a6449b1-3c88-4733-b19e-525be5247a92 req-fce3e6ef-1cc9-4768-b0d5-a46ca77ddcd0 service nova] Acquiring lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.056091] env[62816]: DEBUG oslo_concurrency.lockutils [req-9a6449b1-3c88-4733-b19e-525be5247a92 req-fce3e6ef-1cc9-4768-b0d5-a46ca77ddcd0 service nova] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.056273] env[62816]: DEBUG oslo_concurrency.lockutils [req-9a6449b1-3c88-4733-b19e-525be5247a92 req-fce3e6ef-1cc9-4768-b0d5-a46ca77ddcd0 service nova] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.056449] env[62816]: DEBUG nova.compute.manager [req-9a6449b1-3c88-4733-b19e-525be5247a92 req-fce3e6ef-1cc9-4768-b0d5-a46ca77ddcd0 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] No waiting events found dispatching network-vif-plugged-e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1542.056610] env[62816]: WARNING nova.compute.manager [req-9a6449b1-3c88-4733-b19e-525be5247a92 req-fce3e6ef-1cc9-4768-b0d5-a46ca77ddcd0 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Received unexpected event network-vif-plugged-e267cdb4-076e-4451-8184-3f42be642a6e for instance with vm_state building and task_state spawning. 
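The instance_info_cache dump above (port e267cdb4-076e-4451-8184-3f42be642a6e, MAC fa:16:3e:97:3e:e2, fixed IP 192.168.128.4 on 192.168.128.0/28, MTU 8950) is the per-VIF structure Nova caches. A small sketch of extracting the fields those cache lines report from one such entry; the sample_vif literal below is trimmed from the logged record, and summarize_vif is a hypothetical helper, not Nova code:

```python
# Illustrative sketch only: summarize one network_info VIF entry of the shape
# shown in the cache dump above (abbreviated to the fields used here).
sample_vif = {
    "id": "e267cdb4-076e-4451-8184-3f42be642a6e",
    "address": "fa:16:3e:97:3e:e2",
    "network": {
        "bridge": "br-int",
        "subnets": [{"cidr": "192.168.128.0/28",
                     "ips": [{"address": "192.168.128.4", "type": "fixed"}]}],
        "meta": {"mtu": 8950},
    },
    "type": "ovs",
    "active": True,
}

def summarize_vif(vif):
    """Return a one-line summary of a VIF entry (port, MAC, IPs, MTU)."""
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    return (f"port {vif['id']} mac {vif['address']} ips {','.join(ips)} "
            f"mtu {vif['network']['meta']['mtu']} type {vif['type']} "
            f"active={vif['active']}")

print(summarize_vif(sample_vif))
```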
[ 1542.062024] env[62816]: DEBUG nova.scheduler.client.report [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1542.113012] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52778f42-65ad-b66f-a64a-c8fd448c1c8d, 'name': SearchDatastore_Task, 'duration_secs': 0.027607} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.113012] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.113012] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056/ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1542.113296] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae04082f-c3f2-4220-87b2-95fc7d8cd705 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.122593] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1542.122593] env[62816]: value = "task-1788370" [ 1542.122593] env[62816]: _type = "Task" [ 1542.122593] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.138255] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788370, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.141797] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788369, 'name': Destroy_Task, 'duration_secs': 0.355798} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.141974] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Destroyed the VM [ 1542.142292] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1542.143163] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b4919d72-3b06-44d0-8736-cfb8b2efdf5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.153669] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1542.153669] env[62816]: value = "task-1788371" [ 1542.153669] env[62816]: _type = "Task" [ 1542.153669] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.166997] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788371, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.183523] env[62816]: INFO nova.compute.manager [-] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Took 1.45 seconds to deallocate network for instance. 
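The scheduler report above repeats the provider inventory (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400). In placement terms, schedulable capacity per resource class is (total - reserved) * allocation_ratio; a minimal sketch of that arithmetic using the logged numbers, not Nova's or placement's actual code:

```python
# Illustrative sketch only: effective capacity per resource class from the
# inventory values logged above, using (total - reserved) * allocation_ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    """Map each resource class to its schedulable capacity."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```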
[ 1542.184302] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "refresh_cache-1056fc6e-af1e-4d63-a9ce-9ade4dd73891" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1542.184302] env[62816]: DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Instance network_info: |[{"id": "e267cdb4-076e-4451-8184-3f42be642a6e", "address": "fa:16:3e:97:3e:e2", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape267cdb4-07", "ovs_interfaceid": "e267cdb4-076e-4451-8184-3f42be642a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1542.186586] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:3e:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e267cdb4-076e-4451-8184-3f42be642a6e', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1542.194142] env[62816]: DEBUG oslo.service.loopingcall [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1542.198112] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1542.198321] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c899704-6bbf-475f-b5d9-5f0317846b2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.219549] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1542.219549] env[62816]: value = "task-1788372" [ 1542.219549] env[62816]: _type = "Task" [ 1542.219549] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.229512] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788372, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.516321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1542.516622] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1542.516798] env[62816]: DEBUG nova.network.neutron [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1542.568610] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.569215] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1542.572498] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.619s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.576420] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.004s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.579125] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.202s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.579331] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.585710] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.911s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.585977] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.588428] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.631s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.590721] env[62816]: INFO nova.compute.claims [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1542.641477] env[62816]: INFO nova.scheduler.client.report [None 
req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Deleted allocations for instance 0b10aca0-950b-46f6-8367-5cb9ea7540c8 [ 1542.650973] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788370, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.653278] env[62816]: INFO nova.scheduler.client.report [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Deleted allocations for instance d16a99df-f092-4d56-9730-852883bbdb70 [ 1542.667124] env[62816]: INFO nova.scheduler.client.report [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Deleted allocations for instance afd02433-0912-44ef-8e0e-71d6ee8fbb41 [ 1542.682298] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788371, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.716187] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.734109] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788372, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.096643] env[62816]: DEBUG nova.compute.utils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.100760] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1543.101447] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1543.108844] env[62816]: DEBUG nova.network.neutron [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Updated VIF entry in instance network info cache for port 7ade2505-d2f7-45f5-b360-364fc2c58b96. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1543.109621] env[62816]: DEBUG nova.network.neutron [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Updating instance_info_cache with network_info: [{"id": "7ade2505-d2f7-45f5-b360-364fc2c58b96", "address": "fa:16:3e:21:42:84", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ade2505-d2", "ovs_interfaceid": "7ade2505-d2f7-45f5-b360-364fc2c58b96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.118377] env[62816]: DEBUG nova.network.neutron [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Successfully updated port: 0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1543.138854] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.753472} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.138956] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056/ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1543.142144] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1543.142144] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2936bff-6d37-4afa-a37a-cc01a5f25e22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.156230] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1543.156230] env[62816]: value = "task-1788373" [ 1543.156230] env[62816]: _type = "Task" [ 1543.156230] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.156786] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dbffd990-ac6c-4edb-a301-a5caac4901c2 tempest-VolumesAssistedSnapshotsTest-47905949 tempest-VolumesAssistedSnapshotsTest-47905949-project-member] Lock "0b10aca0-950b-46f6-8367-5cb9ea7540c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.484s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.180990] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a431b0d9-7fdf-431c-908b-a5f2de881997 tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "d16a99df-f092-4d56-9730-852883bbdb70" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.358s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.186128] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788373, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.186775] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1384b2a-b935-45bb-bfbd-4f6d6b6852bb tempest-ImagesNegativeTestJSON-1162552027 tempest-ImagesNegativeTestJSON-1162552027-project-member] Lock "afd02433-0912-44ef-8e0e-71d6ee8fbb41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.604s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.191871] env[62816]: DEBUG oslo_vmware.api [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788371, 'name': RemoveSnapshot_Task, 'duration_secs': 0.549388} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.192026] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1543.192248] env[62816]: INFO nova.compute.manager [None req-a1a5c11e-8e91-494f-9bf1-a8c5fc647d75 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Took 14.56 seconds to snapshot the instance on the hypervisor. [ 1543.232997] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788372, 'name': CreateVM_Task, 'duration_secs': 0.753062} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.236299] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1543.237154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.237458] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.237831] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1543.238844] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f6f94cf-4097-4c69-9e04-4bb6dab83e50 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.244617] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1543.244617] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524d23cf-083b-b24e-a23e-e1ce20a17f52" [ 1543.244617] env[62816]: _type = "Task" [ 1543.244617] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.246303] env[62816]: DEBUG nova.policy [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c6fc23d6e2d47938776335fbbf6b59e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8e20c8f5bdd64f1d89157aa0b947431e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1543.259553] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524d23cf-083b-b24e-a23e-e1ce20a17f52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.318152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "ba6e94c9-eb58-4040-8e28-f255961e76ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.318152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.318152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "ba6e94c9-eb58-4040-8e28-f255961e76ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.318152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.318152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.319652] env[62816]: INFO nova.compute.manager [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Terminating instance [ 1543.323463] env[62816]: DEBUG nova.compute.manager [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1543.323943] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1543.324905] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24fcc229-ed5f-4180-b2e5-37f108858e73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.334813] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1543.335179] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e52b14c-0af1-4921-9018-4907afa12b46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.344775] env[62816]: DEBUG oslo_vmware.api [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1543.344775] env[62816]: value = "task-1788374" [ 1543.344775] env[62816]: _type = "Task" [ 1543.344775] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.356708] env[62816]: DEBUG oslo_vmware.api [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788374, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.606019] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1543.616052] env[62816]: DEBUG oslo_concurrency.lockutils [req-f000c868-7456-49d4-aa22-119b7d8aa3d4 req-f0db8007-6330-41f7-a194-7aba9027bd97 service nova] Releasing lock "refresh_cache-ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.618674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.618820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.618973] env[62816]: DEBUG nova.network.neutron [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1543.651743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.651743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.651980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.652045] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock 
"6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.652469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1543.656358] env[62816]: INFO nova.compute.manager [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Terminating instance [ 1543.659544] env[62816]: DEBUG nova.compute.manager [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1543.659755] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1543.661931] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc812cb9-ee3b-4c9a-bd8f-e3271047e53e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.680510] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788373, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081596} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.684433] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1543.684433] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1543.684433] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce02cb2d-54f3-4dde-9200-e4b48053f4dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.686602] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5082125a-38be-4524-ad2f-0c165379f656 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.717499] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056/ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1543.723427] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1283da0-253e-408f-9384-d29abed578fe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.740064] env[62816]: DEBUG oslo_vmware.api [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1543.740064] env[62816]: value = "task-1788375" [ 1543.740064] env[62816]: _type = "Task" [ 1543.740064] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.749308] env[62816]: INFO nova.network.neutron [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Port ae939699-528f-4716-8d38-8dc982cef0b3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1543.749704] env[62816]: DEBUG nova.network.neutron [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [{"id": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "address": "fa:16:3e:52:b1:2d", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap51d24096-dc", "ovs_interfaceid": "51d24096-dc5e-4a89-a26a-e0cf4eb85e6a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1543.753772] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1543.753772] env[62816]: value = "task-1788376" [ 1543.753772] env[62816]: _type = "Task" [ 1543.753772] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.767835] env[62816]: DEBUG oslo_vmware.api [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788375, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.779464] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788376, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.779811] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524d23cf-083b-b24e-a23e-e1ce20a17f52, 'name': SearchDatastore_Task, 'duration_secs': 0.015963} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.780121] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1543.780413] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1543.780734] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1543.780908] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.781123] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1543.784086] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e7a3d5c-a6c6-45f7-bd5e-b798624d49be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.794492] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1543.794492] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1543.795759] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cb612a6-1639-41f1-bfc6-d553b983fe19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.801422] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1543.801422] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f7e7f0-a811-b0cc-bd27-b7695df5ca1b" [ 1543.801422] env[62816]: _type = "Task" [ 1543.801422] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.812915] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f7e7f0-a811-b0cc-bd27-b7695df5ca1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.856986] env[62816]: DEBUG oslo_vmware.api [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788374, 'name': PowerOffVM_Task, 'duration_secs': 0.191578} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.857298] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1543.857469] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1543.857731] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-433bf52d-6e8c-4984-ad62-c78c7a062d70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.950248] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1543.950248] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1543.950248] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleting the datastore file [datastore1] ba6e94c9-eb58-4040-8e28-f255961e76ca {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1543.950458] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e2ab3ba-5b91-4967-8af2-463723018c7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.959291] env[62816]: DEBUG oslo_vmware.api [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1543.959291] env[62816]: value = "task-1788378" [ 1543.959291] env[62816]: _type = "Task" [ 1543.959291] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.970176] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Successfully created port: 6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1543.978585] env[62816]: DEBUG oslo_vmware.api [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.128023] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca62eb86-8d1c-4abc-a020-ea423b0396b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.134358] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6499b93-7377-4de2-b048-f8b31486e6b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.191570] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8a37e6-d22f-42ac-a014-9ece29fd180e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.195527] env[62816]: DEBUG nova.compute.manager [req-7cd0e012-8a0d-4570-883e-9a806dd95ace req-2623b1d6-6cb6-4485-b9d8-5daa6fed2ffe service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-vif-deleted-ae939699-528f-4716-8d38-8dc982cef0b3 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.202772] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da7612b-427e-477e-8143-249bb940e382 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.219128] env[62816]: DEBUG nova.compute.provider_tree [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.250798] env[62816]: DEBUG oslo_vmware.api [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788375, 'name': PowerOffVM_Task, 'duration_secs': 0.335045} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.251123] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1544.251328] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1544.251622] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69fa9c36-e58f-46b0-b6dc-8757752ffc78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.257958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-6767c231-2dcb-4d19-ae7c-5b026d48ed26" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.271608] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788376, 'name': ReconfigVM_Task, 'duration_secs': 0.299155} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.271951] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Reconfigured VM instance instance-0000002c to attach disk [datastore1] ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056/ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1544.272887] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eaf47cca-2827-44aa-afa7-95815e20f69b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.278983] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1544.278983] env[62816]: value = "task-1788380" [ 1544.278983] env[62816]: _type = "Task" [ 1544.278983] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.290337] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788380, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.315416] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f7e7f0-a811-b0cc-bd27-b7695df5ca1b, 'name': SearchDatastore_Task, 'duration_secs': 0.018499} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.316264] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a1cf272-86db-4057-ad50-f4388d69fa2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.322242] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1544.322242] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207164a-eb4b-e88a-abe4-275ecf0b333c" [ 1544.322242] env[62816]: _type = "Task" [ 1544.322242] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.335245] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5207164a-eb4b-e88a-abe4-275ecf0b333c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.354894] env[62816]: DEBUG nova.network.neutron [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1544.359480] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1544.359773] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1544.360246] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleting the datastore file [datastore1] 6767c231-2dcb-4d19-ae7c-5b026d48ed26 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.360617] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b5ef86be-b303-405f-b240-6e7b28b6bf23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.367465] env[62816]: DEBUG oslo_vmware.api [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1544.367465] env[62816]: value = "task-1788381" [ 1544.367465] env[62816]: _type = "Task" [ 1544.367465] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.377217] env[62816]: DEBUG oslo_vmware.api [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788381, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.441717] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "946dad01-c012-457d-8bfe-6395ff0aaedf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.444035] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.444035] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "946dad01-c012-457d-8bfe-6395ff0aaedf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.448264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.448264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.003s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.449913] env[62816]: INFO nova.compute.manager [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Terminating instance [ 1544.454881] env[62816]: DEBUG nova.compute.manager [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1544.455911] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1544.464652] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ad6f2b-82aa-46ce-9308-b9fb65cb0441 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.477600] env[62816]: DEBUG oslo_vmware.api [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.25053} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.481274] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.481274] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.482747] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.482747] env[62816]: INFO nova.compute.manager [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1544.482747] env[62816]: DEBUG oslo.service.loopingcall [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.482747] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1544.483138] env[62816]: DEBUG nova.compute.manager [-] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1544.483293] env[62816]: DEBUG nova.network.neutron [-] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1544.485453] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d3fdf65-5fbb-449b-b45a-e7db2480ffca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.508966] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Received event network-changed-e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1544.509513] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Refreshing instance network info cache due to event network-changed-e267cdb4-076e-4451-8184-3f42be642a6e. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1544.509811] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Acquiring lock "refresh_cache-1056fc6e-af1e-4d63-a9ce-9ade4dd73891" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1544.509911] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Acquired lock "refresh_cache-1056fc6e-af1e-4d63-a9ce-9ade4dd73891" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.511905] env[62816]: DEBUG nova.network.neutron [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Refreshing network info cache for port e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1544.584891] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1544.584891] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1544.584891] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleting the datastore file [datastore1] 946dad01-c012-457d-8bfe-6395ff0aaedf {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1544.584891] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f673906-3675-4aac-9d33-21ea08cd36b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.593628] env[62816]: DEBUG oslo_vmware.api [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1544.593628] env[62816]: value = "task-1788383" [ 1544.593628] env[62816]: _type = "Task" [ 1544.593628] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.605401] env[62816]: DEBUG oslo_vmware.api [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788383, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.623310] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1544.655570] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.656123] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.656123] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.656256] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1544.656296] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.656430] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.656641] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.657914] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.657914] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 
tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.657914] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.658360] env[62816]: DEBUG nova.virt.hardware [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.660041] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2549cfaf-5fe9-4132-bf30-a116878ff46f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.669328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54601bae-4896-4d4c-934f-9738b567cfa8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.723237] env[62816]: DEBUG nova.scheduler.client.report [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1544.765448] env[62816]: DEBUG oslo_concurrency.lockutils [None req-076675ec-8389-4a85-8328-0778e94c3d8c tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-6767c231-2dcb-4d19-ae7c-5b026d48ed26-ae939699-528f-4716-8d38-8dc982cef0b3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.298s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.791115] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788380, 'name': Rename_Task, 'duration_secs': 0.182041} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.791115] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1544.791115] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6fa0105-79df-4dc7-b8cf-98f90261f777 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.799206] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1544.799206] env[62816]: value = "task-1788384" [ 1544.799206] env[62816]: _type = "Task" [ 1544.799206] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.808382] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788384, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.828888] env[62816]: DEBUG nova.network.neutron [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updating instance_info_cache with network_info: [{"id": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "address": "fa:16:3e:a1:48:71", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aebe84d-1c", "ovs_interfaceid": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.838510] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5207164a-eb4b-e88a-abe4-275ecf0b333c, 'name': SearchDatastore_Task, 'duration_secs': 0.012656} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.842377] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1544.849164] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1056fc6e-af1e-4d63-a9ce-9ade4dd73891/1056fc6e-af1e-4d63-a9ce-9ade4dd73891.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1544.849164] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f909fade-4d2f-493c-bdbe-5cdfe53293e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.858725] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1544.858725] env[62816]: value = "task-1788385" [ 1544.858725] env[62816]: _type = "Task" [ 1544.858725] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.888478] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788385, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.900244] env[62816]: DEBUG oslo_vmware.api [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788381, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290355} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.900580] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1544.900969] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1544.901423] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1544.901766] env[62816]: INFO nova.compute.manager [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1544.901766] env[62816]: DEBUG oslo.service.loopingcall [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1544.902325] env[62816]: DEBUG nova.compute.manager [-] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1544.902668] env[62816]: DEBUG nova.network.neutron [-] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1545.072025] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Successfully created port: 6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1545.111754] env[62816]: DEBUG oslo_vmware.api [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788383, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285481} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.111754] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1545.111754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1545.111754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1545.111945] env[62816]: INFO nova.compute.manager [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Took 0.66 seconds to destroy the instance on the hypervisor. [ 1545.112174] env[62816]: DEBUG oslo.service.loopingcall [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.112375] env[62816]: DEBUG nova.compute.manager [-] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1545.112468] env[62816]: DEBUG nova.network.neutron [-] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1545.234308] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.235446] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1545.238300] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.259s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.240199] env[62816]: INFO nova.compute.claims [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1545.314596] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788384, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.333760] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1545.333760] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Instance network_info: |[{"id": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "address": "fa:16:3e:a1:48:71", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aebe84d-1c", "ovs_interfaceid": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1545.337433] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:48:71', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0aebe84d-1c20-4011-90d2-8e7f579b4b29', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.350055] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Creating folder: Project (ef0dee852154407fa3201a860c55bf3c). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1545.350055] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb17bbf0-64db-41c3-84e4-f9989099a3d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.363696] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Created folder: Project (ef0dee852154407fa3201a860c55bf3c) in parent group-v370905. [ 1545.363884] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Creating folder: Instances. Parent ref: group-v371043. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1545.372241] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-031fb0b8-2eb1-4880-8669-b3bc76fb3e89 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.381042] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788385, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.383046] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Created folder: Instances in parent group-v371043. [ 1545.383373] env[62816]: DEBUG oslo.service.loopingcall [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.383680] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.383873] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8c7b45d-287a-4b0c-a896-a52dc9257f04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.408043] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.408043] env[62816]: value = "task-1788388" [ 1545.408043] env[62816]: _type = "Task" [ 1545.408043] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.418719] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788388, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.747882] env[62816]: DEBUG nova.compute.utils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.750541] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1545.750735] env[62816]: DEBUG nova.network.neutron [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.818582] env[62816]: DEBUG oslo_vmware.api [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788384, 'name': PowerOnVM_Task, 'duration_secs': 0.621658} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.818582] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.818582] env[62816]: INFO nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Took 9.47 seconds to spawn the instance on the hypervisor. 
[ 1545.818993] env[62816]: DEBUG nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1545.819684] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65387d7e-50ba-4fff-9cd3-8dd93df6094c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.850269] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.851133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.876933] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788385, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.918311] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788388, 'name': CreateVM_Task, 'duration_secs': 0.457815} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.918549] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1545.919667] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.919784] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.920143] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1545.920452] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbfef294-a366-4987-acbf-b3ec242543b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.925850] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1545.925850] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5267f475-33b7-a56c-b2fb-c5c3dedc3b2f" [ 1545.925850] env[62816]: _type = "Task" [ 1545.925850] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.936021] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5267f475-33b7-a56c-b2fb-c5c3dedc3b2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.949593] env[62816]: DEBUG nova.policy [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c624faef55d44cd8c4871ac08954840', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '934fdecf54c6435999885451fc2204ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1546.013863] env[62816]: DEBUG nova.compute.manager [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1546.154216] env[62816]: DEBUG nova.network.neutron [-] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.174892] env[62816]: DEBUG nova.network.neutron [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Updated VIF entry in instance network info cache for port e267cdb4-076e-4451-8184-3f42be642a6e. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1546.175317] env[62816]: DEBUG nova.network.neutron [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Updating instance_info_cache with network_info: [{"id": "e267cdb4-076e-4451-8184-3f42be642a6e", "address": "fa:16:3e:97:3e:e2", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape267cdb4-07", "ovs_interfaceid": "e267cdb4-076e-4451-8184-3f42be642a6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.251129] env[62816]: DEBUG nova.compute.utils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 
tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1546.355694] env[62816]: INFO nova.compute.manager [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Took 44.85 seconds to build instance. [ 1546.357851] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1546.381953] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788385, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.438747] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5267f475-33b7-a56c-b2fb-c5c3dedc3b2f, 'name': SearchDatastore_Task, 'duration_secs': 0.018607} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.441627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.441901] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.442226] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.442603] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.442603] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 
tempest-AttachVolumeNegativeTest-1607224647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.443615] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6fd62c9-fa70-4fee-ba67-a0f75dc45df4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.454150] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.454404] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.455196] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60c3959c-e2f0-4598-bdc9-cff2dccd9666 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.466318] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1546.466318] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52619ae8-c21c-1e14-9a5a-29c6659a7d8f" [ 1546.466318] env[62816]: _type = "Task" [ 1546.466318] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.476042] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52619ae8-c21c-1e14-9a5a-29c6659a7d8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.532914] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.661316] env[62816]: INFO nova.compute.manager [-] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Took 2.18 seconds to deallocate network for instance. 
[ 1546.671046] env[62816]: DEBUG nova.network.neutron [-] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.681407] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Releasing lock "refresh_cache-1056fc6e-af1e-4d63-a9ce-9ade4dd73891" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.682258] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Received event network-vif-deleted-cd98f4df-d678-4280-8111-86d76a117d36 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.682258] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Received event network-vif-plugged-0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.682258] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Acquiring lock "f9d9593a-1c25-47a1-98fd-4462a851f134-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.682682] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.682682] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.682757] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] No waiting events found dispatching network-vif-plugged-0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1546.682869] env[62816]: WARNING nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Received unexpected event network-vif-plugged-0aebe84d-1c20-4011-90d2-8e7f579b4b29 for instance with vm_state building and task_state spawning. 
[ 1546.683048] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Received event network-changed-0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1546.683297] env[62816]: DEBUG nova.compute.manager [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Refreshing instance network info cache due to event network-changed-0aebe84d-1c20-4011-90d2-8e7f579b4b29. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1546.683507] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Acquiring lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.683655] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Acquired lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.684116] env[62816]: DEBUG nova.network.neutron [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Refreshing network info cache for port 0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1546.709914] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52260871-7841-1e8c-bd23-328e548f72e8/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1546.711489] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213615ce-31de-42a4-9b5b-a84e947cfa91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.717834] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52260871-7841-1e8c-bd23-328e548f72e8/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1546.718019] env[62816]: ERROR oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52260871-7841-1e8c-bd23-328e548f72e8/disk-0.vmdk due to incomplete transfer. 
[ 1546.719180] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-70bc94aa-8e41-4b55-ab33-804ff782e9ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.731401] env[62816]: DEBUG oslo_vmware.rw_handles [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52260871-7841-1e8c-bd23-328e548f72e8/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1546.732260] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Uploaded image 44a8a93c-701f-4fba-8aad-530bd31b3f1b to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1546.736688] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1546.736688] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ead6daa4-dd11-4e76-9fab-5f1c24410b80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.741243] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1546.741243] env[62816]: value = "task-1788389" [ 1546.741243] env[62816]: _type = "Task" [ 1546.741243] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.749896] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788389, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.756676] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1546.776330] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086d5b50-28eb-4892-9b5a-73b452fa88c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.784180] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd3aa58f-1d1f-435c-bffb-73ede41e1153 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.817635] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b64917e-78c2-43d7-8877-23bc1c779262 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.822609] env[62816]: DEBUG nova.network.neutron [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Successfully created port: bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.828146] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce5706c-aae2-4cb1-8805-b00222a4231a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.846107] env[62816]: DEBUG nova.compute.provider_tree [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.863763] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c5cbd838-12bb-44af-81d2-9662220484d0 tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.447s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.879117] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788385, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.786096} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.879403] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1056fc6e-af1e-4d63-a9ce-9ade4dd73891/1056fc6e-af1e-4d63-a9ce-9ade4dd73891.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1546.879627] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1546.879960] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48bcfd86-67e4-4392-a504-37b381f83fe2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.884021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1546.887572] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1546.887572] env[62816]: value = "task-1788390" [ 1546.887572] env[62816]: _type = "Task" [ 1546.887572] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.897392] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788390, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.919365] env[62816]: DEBUG nova.network.neutron [-] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.979647] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52619ae8-c21c-1e14-9a5a-29c6659a7d8f, 'name': SearchDatastore_Task, 'duration_secs': 0.018867} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.979647] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5ae9e6-21ca-4a80-91d5-a67e15c57ad8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.983945] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1546.983945] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525d2347-98a6-ff3f-8e7c-61e1ab61f8df" [ 1546.983945] env[62816]: _type = "Task" [ 1546.983945] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.992975] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525d2347-98a6-ff3f-8e7c-61e1ab61f8df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.172458] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.174952] env[62816]: INFO nova.compute.manager [-] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Took 2.27 seconds to deallocate network for instance. [ 1547.260547] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788389, 'name': Destroy_Task, 'duration_secs': 0.422613} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.260890] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Destroyed the VM [ 1547.261148] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1547.261331] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dcd03312-d224-41b8-a8fc-811c96897d64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.274141] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1547.274141] env[62816]: value = "task-1788391" [ 1547.274141] env[62816]: _type = "Task" [ 1547.274141] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.285501] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788391, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.349725] env[62816]: DEBUG nova.scheduler.client.report [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1547.398592] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.188973} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.398873] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1547.400365] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5787546-6d92-49fe-80a2-c1c3f837c4f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.429832] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] 1056fc6e-af1e-4d63-a9ce-9ade4dd73891/1056fc6e-af1e-4d63-a9ce-9ade4dd73891.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1547.432825] env[62816]: INFO nova.compute.manager [-] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Took 2.32 seconds to deallocate network for instance. [ 1547.433087] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e3c94ff-439e-466e-8eb4-68ae5994218a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.464701] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1547.464701] env[62816]: value = "task-1788392" [ 1547.464701] env[62816]: _type = "Task" [ 1547.464701] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.474496] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788392, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.497926] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525d2347-98a6-ff3f-8e7c-61e1ab61f8df, 'name': SearchDatastore_Task, 'duration_secs': 0.031169} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.499767] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.500066] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f9d9593a-1c25-47a1-98fd-4462a851f134/f9d9593a-1c25-47a1-98fd-4462a851f134.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1547.501189] env[62816]: DEBUG nova.compute.manager [req-6a6ecf3f-1c16-49cc-9371-a0affd570380 req-31f1e6e3-6389-408d-865a-20fb1ae3fed2 service nova] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Received event network-vif-deleted-51d24096-dc5e-4a89-a26a-e0cf4eb85e6a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1547.501726] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab885432-0f74-4331-852c-d1e8889dc3bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.510338] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1547.510338] env[62816]: value = "task-1788393" [ 1547.510338] env[62816]: _type = "Task" [ 1547.510338] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.523337] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788393, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.576015] env[62816]: DEBUG nova.compute.manager [req-9f4dea06-108c-4366-979f-0fddc10dda13 req-338f60d3-27a9-496a-9760-1c3a8e2568b9 service nova] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Received event network-vif-deleted-c4be71b6-096d-43a8-9cf0-74f91c97d74c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1547.576506] env[62816]: DEBUG nova.compute.manager [req-9f4dea06-108c-4366-979f-0fddc10dda13 req-338f60d3-27a9-496a-9760-1c3a8e2568b9 service nova] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Received event network-vif-deleted-24da593b-3452-47bf-a201-21c83c64cae8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1547.632523] env[62816]: DEBUG nova.network.neutron [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updated VIF entry in instance network info cache for port 0aebe84d-1c20-4011-90d2-8e7f579b4b29. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1547.632930] env[62816]: DEBUG nova.network.neutron [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updating instance_info_cache with network_info: [{"id": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "address": "fa:16:3e:a1:48:71", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aebe84d-1c", "ovs_interfaceid": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.684173] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.700587] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Successfully updated port: 6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1547.745830] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.746412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.775443] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1547.789061] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788391, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.809521] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:50:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1494884916',id=30,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-644584562',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1547.810045] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1547.810336] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1547.810818] env[62816]: DEBUG nova.virt.hardware [None 
req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1547.811129] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1547.811394] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1547.811732] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1547.812031] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1547.812347] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1547.813053] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1547.813496] env[62816]: DEBUG nova.virt.hardware [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1547.819047] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922d55d2-c2f3-46d3-8d7c-cae937c2d34b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.824450] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e5bfd4f-4a86-4c0b-9536-159f70da3fe7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.860609] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 
tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.618s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.860609] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1547.865225] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.396s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.865225] env[62816]: INFO nova.compute.claims [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1547.960451] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.978693] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788392, 'name': ReconfigVM_Task, 'duration_secs': 0.309301} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.979898] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Reconfigured VM instance instance-0000002d to attach disk [datastore1] 1056fc6e-af1e-4d63-a9ce-9ade4dd73891/1056fc6e-af1e-4d63-a9ce-9ade4dd73891.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1547.980977] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce386e51-2855-4a89-baac-37c070880286 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.992250] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1547.992250] env[62816]: value = "task-1788394" [ 1547.992250] env[62816]: _type = "Task" [ 1547.992250] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.008651] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788394, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.024448] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788393, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.136360] env[62816]: DEBUG oslo_concurrency.lockutils [req-2bdc0787-845d-490c-b7b1-154d2e047829 req-68b9eaa3-a50d-4a7d-9751-6e7895f64638 service nova] Releasing lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1548.249319] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1548.291171] env[62816]: DEBUG oslo_vmware.api [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788391, 'name': RemoveSnapshot_Task, 'duration_secs': 0.524059} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.291477] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1548.291715] env[62816]: INFO nova.compute.manager [None req-30f5df67-f13f-4a5b-b622-51e148dd7a94 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Took 14.73 seconds to snapshot the instance on the hypervisor. [ 1548.370021] env[62816]: DEBUG nova.compute.utils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1548.374457] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1548.377020] env[62816]: DEBUG nova.network.neutron [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1548.458717] env[62816]: DEBUG nova.policy [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'caa3fab70d854a7b8134ca935692c306', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f4b3d1d951945a7a7f808588e3c7c93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1548.502542] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788394, 'name': Rename_Task, 'duration_secs': 0.209056} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.502832] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1548.503098] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84279ddd-6fea-4d9c-a5f7-5b9ffd42506e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.510728] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1548.510728] env[62816]: value = "task-1788395" [ 1548.510728] env[62816]: _type = "Task" [ 1548.510728] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.537234] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788395, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.537234] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788393, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649172} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.537234] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f9d9593a-1c25-47a1-98fd-4462a851f134/f9d9593a-1c25-47a1-98fd-4462a851f134.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1548.537667] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1548.537667] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-232b9daa-04b6-4bf1-a49e-6b64c9bbac43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.544682] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1548.544682] env[62816]: value = "task-1788396" [ 1548.544682] env[62816]: _type = "Task" [ 1548.544682] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.554658] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788396, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.781201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1548.875205] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1549.020890] env[62816]: DEBUG oslo_vmware.api [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788395, 'name': PowerOnVM_Task, 'duration_secs': 0.456112} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.023719] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1549.023950] env[62816]: INFO nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Took 9.95 seconds to spawn the instance on the hypervisor. [ 1549.024386] env[62816]: DEBUG nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1549.026311] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83096128-c41b-4876-95c9-fadd3d97e6ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.052062] env[62816]: DEBUG nova.network.neutron [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Successfully updated port: bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1549.062493] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788396, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089607} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.063566] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1549.064131] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e6e4ac-8c22-4e37-818c-1c153505fb59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.102804] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] f9d9593a-1c25-47a1-98fd-4462a851f134/f9d9593a-1c25-47a1-98fd-4462a851f134.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1549.104432] env[62816]: DEBUG nova.network.neutron [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Successfully created port: fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1549.109998] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40622841-ac2a-439f-8d9b-63845343e629 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.133998] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1549.133998] env[62816]: value = "task-1788397" [ 1549.133998] env[62816]: _type = "Task" [ 1549.133998] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.145285] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788397, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.158211] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.160504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.160504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.160504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.160504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.161329] env[62816]: INFO nova.compute.manager [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Terminating instance [ 1549.163341] env[62816]: DEBUG nova.compute.manager [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1549.163861] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1549.164730] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9fac4b-5af2-4941-a22e-1b2988868e6c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.174159] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1549.174543] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3e28fa93-5128-4745-a63a-e9554069f457 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.182403] env[62816]: DEBUG oslo_vmware.api [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1549.182403] env[62816]: value = "task-1788398" [ 1549.182403] env[62816]: _type = "Task" [ 1549.182403] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.191332] env[62816]: DEBUG oslo_vmware.api [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788398, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.288289] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "d34b7828-542e-4b66-a923-644d0d0f4866" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.288535] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.454701] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc348c4e-b18f-44d6-ae39-b8557ab5f2c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.462513] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a02c90d-d926-4a21-8b63-9cfdba228ae1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.494711] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6bee96-ec46-4548-98a2-5cea404ac5cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.502778] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09961513-65f9-4c7f-8c71-41cd333c00f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.516720] env[62816]: DEBUG nova.compute.provider_tree [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1549.561884] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.561884] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.561884] env[62816]: DEBUG nova.network.neutron [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: 
a01e772c-dafe-4091-bae6-f9f59d5c972d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1549.565922] env[62816]: INFO nova.compute.manager [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Took 46.40 seconds to build instance. [ 1549.644678] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788397, 'name': ReconfigVM_Task, 'duration_secs': 0.390834} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.644973] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Reconfigured VM instance instance-0000002e to attach disk [datastore1] f9d9593a-1c25-47a1-98fd-4462a851f134/f9d9593a-1c25-47a1-98fd-4462a851f134.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1549.645643] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c32545f1-fa2b-4509-85ab-910c446f0ed1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.653237] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1549.653237] env[62816]: value = "task-1788399" [ 1549.653237] env[62816]: _type = "Task" [ 1549.653237] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.665018] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788399, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.692369] env[62816]: DEBUG oslo_vmware.api [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788398, 'name': PowerOffVM_Task, 'duration_secs': 0.222473} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.692826] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1549.692998] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1549.693286] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-84e6031b-161b-427d-974e-7e54523f1fcc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.802488] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1549.802767] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1549.802885] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Deleting the datastore file [datastore1] ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1549.803192] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5076c510-3e17-40ee-971f-ec2f6ffa9c58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.810382] env[62816]: DEBUG oslo_vmware.api [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for the task: (returnval){ [ 1549.810382] env[62816]: value = "task-1788401" [ 1549.810382] env[62816]: _type = "Task" [ 1549.810382] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.819878] env[62816]: DEBUG oslo_vmware.api [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788401, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.889363] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1549.894985] env[62816]: DEBUG nova.compute.manager [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received event network-vif-plugged-6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.895333] env[62816]: DEBUG oslo_concurrency.lockutils [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] Acquiring lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.895623] env[62816]: DEBUG oslo_concurrency.lockutils [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.895920] env[62816]: DEBUG oslo_concurrency.lockutils [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.896144] env[62816]: DEBUG nova.compute.manager [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] No waiting events found dispatching network-vif-plugged-6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1549.896360] env[62816]: WARNING nova.compute.manager [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received unexpected event network-vif-plugged-6a0018bd-3abf-40af-978e-8bdd8a1e59ad for instance with vm_state building and task_state spawning. [ 1549.896568] env[62816]: DEBUG nova.compute.manager [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received event network-changed-6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.896758] env[62816]: DEBUG nova.compute.manager [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Refreshing instance network info cache due to event network-changed-6a0018bd-3abf-40af-978e-8bdd8a1e59ad. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1549.896978] env[62816]: DEBUG oslo_concurrency.lockutils [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] Acquiring lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.897167] env[62816]: DEBUG oslo_concurrency.lockutils [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] Acquired lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.897370] env[62816]: DEBUG nova.network.neutron [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Refreshing network info cache for port 6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1549.924771] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1549.925714] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1549.925951] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1549.926204] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1549.926401] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1549.926587] env[62816]: DEBUG nova.virt.hardware [None 
req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1549.926836] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1549.927066] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1549.927288] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1549.927518] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1549.927848] env[62816]: DEBUG nova.virt.hardware [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1549.929210] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323076f1-0bb9-4c6f-a502-7f4fe297ac93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.936148] env[62816]: DEBUG nova.compute.manager [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Received event network-vif-plugged-bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.936496] env[62816]: DEBUG oslo_concurrency.lockutils [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] Acquiring lock "a01e772c-dafe-4091-bae6-f9f59d5c972d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.936886] env[62816]: DEBUG oslo_concurrency.lockutils [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.936939] env[62816]: DEBUG oslo_concurrency.lockutils [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.937127] env[62816]: DEBUG nova.compute.manager [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] No waiting events found dispatching network-vif-plugged-bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1549.937458] env[62816]: WARNING nova.compute.manager [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Received unexpected event network-vif-plugged-bf038e03-93db-4837-8a8e-6b876acd1b7c for instance with vm_state building and task_state spawning. [ 1549.937789] env[62816]: DEBUG nova.compute.manager [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Received event network-changed-bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.937875] env[62816]: DEBUG nova.compute.manager [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Refreshing instance network info cache due to event network-changed-bf038e03-93db-4837-8a8e-6b876acd1b7c. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1549.938059] env[62816]: DEBUG oslo_concurrency.lockutils [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] Acquiring lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.946925] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7f76d8-ab29-4dff-b71b-d5c199dbe33a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.020567] env[62816]: DEBUG nova.scheduler.client.report [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1550.068710] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8421280-974a-44a4-8c28-566a0f26d163 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.395s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.106599] env[62816]: DEBUG nova.network.neutron [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1550.167182] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788399, 'name': Rename_Task, 'duration_secs': 0.140448} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.167804] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.168286] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b63675a0-01c4-4c0e-af71-4505ec443ab3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.180174] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1550.180174] env[62816]: value = "task-1788402" [ 1550.180174] env[62816]: _type = "Task" [ 1550.180174] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.193534] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.326151] env[62816]: DEBUG oslo_vmware.api [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Task: {'id': task-1788401, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.411259} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.326707] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1550.327052] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1550.327390] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1550.327688] env[62816]: INFO nova.compute.manager [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Took 1.16 seconds to destroy the instance on the hypervisor. 
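The destroy sequence recorded above (power off, UnregisterVM, DeleteDatastoreFile_Task, then wait_for_task polling until the task completes) follows oslo.vmware's invoke-then-wait pattern. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession named `session`; the helper name, datastore path and datacenter reference are illustrative and do not come from the log:

```python
# Sketch of the invoke-then-wait pattern visible in the entries above.
# Assumes `session` is an existing oslo_vmware.api.VMwareAPISession;
# the datastore path and datacenter reference are illustrative only.
from oslo_vmware import exceptions as vexc


def delete_datastore_dir(session, datacenter_ref, ds_path):
    """Delete a datastore path and block until vCenter finishes the task."""
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim,
        "DeleteDatastoreFile_Task",
        file_manager,
        name=ds_path,                # e.g. "[datastore1] <instance-uuid>"
        datacenter=datacenter_ref,
    )
    try:
        # wait_for_task polls the task (the "progress is 0%" lines above)
        # and raises if vCenter reports an error state.
        session.wait_for_task(task)
    except vexc.FileNotFoundException:
        # An already-missing path is treated as successfully deleted here.
        pass
```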
[ 1550.328303] env[62816]: DEBUG oslo.service.loopingcall [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1550.328635] env[62816]: DEBUG nova.compute.manager [-] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1550.330324] env[62816]: DEBUG nova.network.neutron [-] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1550.470062] env[62816]: DEBUG nova.network.neutron [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1550.529979] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.530640] env[62816]: DEBUG nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1550.533767] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.053s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.537960] env[62816]: INFO nova.compute.claims [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1550.571729] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1550.690394] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788402, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.694558] env[62816]: DEBUG nova.network.neutron [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updating instance_info_cache with network_info: [{"id": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "address": "fa:16:3e:81:17:26", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf038e03-93", "ovs_interfaceid": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.741020] env[62816]: DEBUG nova.network.neutron [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1550.881060] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Successfully updated port: 6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1551.042062] env[62816]: DEBUG nova.compute.utils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1551.048542] env[62816]: DEBUG nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Not allocating networking since 'none' was specified. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1551.098251] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.195643] env[62816]: DEBUG oslo_vmware.api [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788402, 'name': PowerOnVM_Task, 'duration_secs': 0.746587} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.195989] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1551.197849] env[62816]: INFO nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Took 9.32 seconds to spawn the instance on the hypervisor. [ 1551.198221] env[62816]: DEBUG nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1551.198714] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Releasing lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.198997] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Instance network_info: |[{"id": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "address": "fa:16:3e:81:17:26", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapbf038e03-93", "ovs_interfaceid": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1551.200407] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d6c094-088a-46e6-bfc4-f461e2f0ba9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.203362] env[62816]: DEBUG oslo_concurrency.lockutils [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] Acquired lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.203547] env[62816]: DEBUG nova.network.neutron [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Refreshing network info cache for port bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1551.204899] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:17:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bf038e03-93db-4837-8a8e-6b876acd1b7c', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1551.213140] env[62816]: DEBUG oslo.service.loopingcall [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1551.213896] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1551.216043] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f74896dc-2a5f-4337-a5e2-3051b1ee7214 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.243382] env[62816]: DEBUG oslo_concurrency.lockutils [req-5e5a0930-1c4e-413c-bbf5-daad53033e88 req-6c1563b4-b94a-48a0-b0ed-5b0767b03212 service nova] Releasing lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.243976] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1551.243976] env[62816]: value = "task-1788403" [ 1551.243976] env[62816]: _type = "Task" [ 1551.243976] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.251844] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788403, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.384616] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.384616] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.384616] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1551.443030] env[62816]: DEBUG nova.network.neutron [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Successfully updated port: fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1551.553414] env[62816]: DEBUG nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1551.675552] env[62816]: DEBUG nova.network.neutron [-] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.755085] env[62816]: INFO nova.compute.manager [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Took 42.80 seconds to build instance. [ 1551.761531] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788403, 'name': CreateVM_Task, 'duration_secs': 0.450621} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.765386] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1551.765746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.765746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.765969] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1551.766247] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d00a3d01-12d9-4c3d-86c2-d4cbd8eff0ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.773961] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1551.773961] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5249b21f-35eb-7745-c66e-97b88f216625" [ 1551.773961] env[62816]: _type = "Task" [ 1551.773961] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.785161] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5249b21f-35eb-7745-c66e-97b88f216625, 'name': SearchDatastore_Task, 'duration_secs': 0.009124} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.790922] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.791185] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1551.791417] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.791563] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.791740] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1551.792465] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66733e5f-f73f-4d18-a54d-64225610e83b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.801169] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1551.801451] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1551.804289] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc833ef2-2787-4cd2-933f-d047f70cf139 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.811172] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1551.811172] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52604660-30d8-f9e0-d69d-169798b54917" [ 1551.811172] env[62816]: _type = "Task" [ 1551.811172] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.819389] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52604660-30d8-f9e0-d69d-169798b54917, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.945078] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "refresh_cache-049e1f97-ab58-4797-a084-f16a7a58e2cc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1551.949282] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired lock "refresh_cache-049e1f97-ab58-4797-a084-f16a7a58e2cc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1551.949282] env[62816]: DEBUG nova.network.neutron [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1551.979136] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.053535] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb96ba9b-5c7b-4c44-9bb7-1ab07d475833 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.061952] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b947f49f-75e1-414b-93e1-6e8296efe296 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.107240] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288ca539-7d28-4c3f-bbb8-407593d3b547 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.114222] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b711b8f3-8790-486b-9520-e1b6592dcbee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.128485] env[62816]: DEBUG nova.compute.provider_tree [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1552.148617] env[62816]: DEBUG nova.compute.manager [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received event network-vif-plugged-6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.148836] env[62816]: DEBUG oslo_concurrency.lockutils [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] Acquiring lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.149082] env[62816]: DEBUG oslo_concurrency.lockutils [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.149197] env[62816]: DEBUG oslo_concurrency.lockutils [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.149405] env[62816]: DEBUG nova.compute.manager [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] No waiting events found dispatching network-vif-plugged-6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1552.149703] env[62816]: WARNING 
nova.compute.manager [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received unexpected event network-vif-plugged-6203dfb1-2392-4ca7-bcbd-c68af134c40c for instance with vm_state building and task_state spawning. [ 1552.149897] env[62816]: DEBUG nova.compute.manager [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received event network-changed-6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.150089] env[62816]: DEBUG nova.compute.manager [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Refreshing instance network info cache due to event network-changed-6203dfb1-2392-4ca7-bcbd-c68af134c40c. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1552.150732] env[62816]: DEBUG oslo_concurrency.lockutils [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] Acquiring lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.179341] env[62816]: INFO nova.compute.manager [-] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Took 1.85 seconds to deallocate network for instance. [ 1552.257576] env[62816]: DEBUG nova.network.neutron [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updated VIF entry in instance network info cache for port bf038e03-93db-4837-8a8e-6b876acd1b7c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1552.258185] env[62816]: DEBUG nova.network.neutron [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updating instance_info_cache with network_info: [{"id": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "address": "fa:16:3e:81:17:26", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf038e03-93", "ovs_interfaceid": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.259076] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f5ef2ca-f3ce-452f-82a2-b38ef08c45f7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.752s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.308303] env[62816]: DEBUG nova.compute.manager [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Received event network-vif-plugged-fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.308515] env[62816]: DEBUG oslo_concurrency.lockutils [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] Acquiring lock "049e1f97-ab58-4797-a084-f16a7a58e2cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.308717] env[62816]: DEBUG oslo_concurrency.lockutils [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1552.308882] env[62816]: DEBUG oslo_concurrency.lockutils [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1552.309794] env[62816]: DEBUG nova.compute.manager [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] No waiting events found dispatching network-vif-plugged-fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1552.310996] env[62816]: WARNING nova.compute.manager [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Received unexpected event network-vif-plugged-fe2be693-eb9b-4f94-b238-992c67bfedda for instance with vm_state building and task_state spawning. [ 1552.310996] env[62816]: DEBUG nova.compute.manager [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Received event network-vif-deleted-7ade2505-d2f7-45f5-b360-364fc2c58b96 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.310996] env[62816]: DEBUG nova.compute.manager [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Received event network-changed-fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1552.310996] env[62816]: DEBUG nova.compute.manager [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Refreshing instance network info cache due to event network-changed-fe2be693-eb9b-4f94-b238-992c67bfedda. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1552.310996] env[62816]: DEBUG oslo_concurrency.lockutils [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] Acquiring lock "refresh_cache-049e1f97-ab58-4797-a084-f16a7a58e2cc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.324883] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52604660-30d8-f9e0-d69d-169798b54917, 'name': SearchDatastore_Task, 'duration_secs': 0.00826} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.325616] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a08bcbfe-47f9-498d-a318-b0e42b8b2d5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.332853] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1552.332853] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52af6a05-5559-7bf1-56e2-7a8b2b067be8" [ 1552.332853] env[62816]: _type = "Task" [ 1552.332853] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.342507] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52af6a05-5559-7bf1-56e2-7a8b2b067be8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.552134] env[62816]: DEBUG nova.network.neutron [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1552.575842] env[62816]: DEBUG nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1552.602816] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1552.603114] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1552.603412] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1552.603648] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1552.603833] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1552.604028] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1552.604324] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1552.604534] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1552.604740] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d 
tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1552.604942] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1552.605169] env[62816]: DEBUG nova.virt.hardware [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1552.606088] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5469ba2-fb34-413d-8b61-16bd9ca7e516 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.614282] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df0fc5f-b0c4-4968-80c0-ce9e0fd07685 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.628993] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.634889] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Creating folder: Project (e2d1ad8264df4cb0a2d999f8669842ec). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1552.639040] env[62816]: DEBUG nova.scheduler.client.report [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1552.639371] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59a8f386-ceb3-48ca-8338-3986e1c62e48 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.650915] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Created folder: Project (e2d1ad8264df4cb0a2d999f8669842ec) in parent group-v370905. 
[ 1552.650915] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Creating folder: Instances. Parent ref: group-v371047. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1552.651348] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9061ba95-9900-4306-96c5-3df0247914a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.662134] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Created folder: Instances in parent group-v371047. [ 1552.662455] env[62816]: DEBUG oslo.service.loopingcall [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.662800] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1552.663073] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f9f3368-8074-4aa4-a309-8c17338ccea7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.687974] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.688429] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.688429] env[62816]: value = "task-1788406" [ 1552.688429] env[62816]: _type = "Task" [ 1552.688429] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.696497] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788406, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.760213] env[62816]: DEBUG oslo_concurrency.lockutils [req-0bac46bc-c786-4722-9b1a-5e030e84c4de req-7be5ff63-194a-42ac-b3db-d5fb3945fb18 service nova] Releasing lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.846190] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52af6a05-5559-7bf1-56e2-7a8b2b067be8, 'name': SearchDatastore_Task, 'duration_secs': 0.008822} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.846190] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.846190] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d/a01e772c-dafe-4091-bae6-f9f59d5c972d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1552.846190] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c0403a8-70e4-4361-857a-4836c9e88b15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.848518] env[62816]: DEBUG nova.network.neutron [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Updating instance_info_cache with network_info: [{"id": "6a0018bd-3abf-40af-978e-8bdd8a1e59ad", "address": "fa:16:3e:84:ea:fe", "network": {"id": "2556c13c-4384-4354-b52b-f146337d77c7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1469143001", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0018bd-3a", "ovs_interfaceid": "6a0018bd-3abf-40af-978e-8bdd8a1e59ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "address": "fa:16:3e:b6:0b:7e", "network": {"id": "abd7e484-d1ef-4023-9bb0-b1edeccfdd36", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1182060646", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6203dfb1-23", "ovs_interfaceid": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.854818] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1552.854818] env[62816]: value = "task-1788407" [ 1552.854818] env[62816]: _type = "Task" [ 1552.854818] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.866252] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.874792] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "65e97c6a-5d8f-4241-9095-65a5a6132a69" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1552.875139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.030063] env[62816]: DEBUG nova.network.neutron [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Updating instance_info_cache with network_info: [{"id": "fe2be693-eb9b-4f94-b238-992c67bfedda", "address": "fa:16:3e:59:87:65", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2be693-eb", "ovs_interfaceid": "fe2be693-eb9b-4f94-b238-992c67bfedda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1553.143905] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1553.143905] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1553.147179] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.884s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1553.149734] env[62816]: INFO nova.compute.claims [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1553.206268] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788406, 'name': CreateVM_Task, 'duration_secs': 0.367025} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.207198] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.207688] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.207852] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.208240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.209473] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-647aad14-147a-4d83-aba5-ed8cd022504b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.221029] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1553.221029] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d764d3-9859-c207-5f1d-35843d4e72ec" [ 1553.221029] env[62816]: _type = "Task" [ 1553.221029] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.228206] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d764d3-9859-c207-5f1d-35843d4e72ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.355869] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Releasing lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.356279] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Instance network_info: |[{"id": "6a0018bd-3abf-40af-978e-8bdd8a1e59ad", "address": "fa:16:3e:84:ea:fe", "network": {"id": "2556c13c-4384-4354-b52b-f146337d77c7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1469143001", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0018bd-3a", "ovs_interfaceid": "6a0018bd-3abf-40af-978e-8bdd8a1e59ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "address": "fa:16:3e:b6:0b:7e", "network": {"id": "abd7e484-d1ef-4023-9bb0-b1edeccfdd36", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1182060646", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6203dfb1-23", "ovs_interfaceid": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1553.356884] env[62816]: DEBUG oslo_concurrency.lockutils [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] Acquired lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.356979] env[62816]: DEBUG nova.network.neutron 
[req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Refreshing network info cache for port 6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.358181] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:ea:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4adc8ed0-d11a-4510-9be0-b27c0da3a903', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6a0018bd-3abf-40af-978e-8bdd8a1e59ad', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:0b:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6d4ef133-b6f3-41d1-add4-92a1482195cf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6203dfb1-2392-4ca7-bcbd-c68af134c40c', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.370943] env[62816]: DEBUG oslo.service.loopingcall [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.378188] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.378188] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d4adbf9-6981-415c-8264-88a4d0490b9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.396177] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1553.403952] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525125} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.406019] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d/a01e772c-dafe-4091-bae6-f9f59d5c972d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1553.406019] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1553.406019] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1eeb53a1-2767-4100-a782-ddc40092b8b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.409329] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.409329] env[62816]: value = "task-1788408" [ 1553.409329] env[62816]: _type = "Task" [ 1553.409329] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.415261] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1553.415261] env[62816]: value = "task-1788409" [ 1553.415261] env[62816]: _type = "Task" [ 1553.415261] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.430464] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788408, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.430464] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788409, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.533914] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Releasing lock "refresh_cache-049e1f97-ab58-4797-a084-f16a7a58e2cc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.534405] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Instance network_info: |[{"id": "fe2be693-eb9b-4f94-b238-992c67bfedda", "address": "fa:16:3e:59:87:65", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2be693-eb", "ovs_interfaceid": "fe2be693-eb9b-4f94-b238-992c67bfedda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1553.534788] env[62816]: DEBUG oslo_concurrency.lockutils [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] Acquired lock "refresh_cache-049e1f97-ab58-4797-a084-f16a7a58e2cc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.535077] env[62816]: DEBUG nova.network.neutron [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Refreshing network info cache for port fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.537256] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:87:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe2be693-eb9b-4f94-b238-992c67bfedda', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1553.547051] env[62816]: DEBUG oslo.service.loopingcall [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 
tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1553.550454] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1553.551247] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6e62fde-9468-479c-a6a4-8a72c7a9b202 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.578512] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1553.578512] env[62816]: value = "task-1788410" [ 1553.578512] env[62816]: _type = "Task" [ 1553.578512] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.589609] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788410, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.657065] env[62816]: DEBUG nova.compute.utils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1553.663167] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1553.663167] env[62816]: DEBUG nova.network.neutron [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1553.730424] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d764d3-9859-c207-5f1d-35843d4e72ec, 'name': SearchDatastore_Task, 'duration_secs': 0.0598} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.730781] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.731036] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1553.731285] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.731448] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.731759] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1553.732081] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-629bfc0d-1793-480d-ac7d-23a72c69584d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.741537] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1553.741716] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1553.742583] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c45c03d-2e63-41f8-be0a-1cd4ed932b4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.751141] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1553.751141] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f23be7-12b5-09d1-f600-8c774e26a445" [ 1553.751141] env[62816]: _type = "Task" [ 1553.751141] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.760863] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f23be7-12b5-09d1-f600-8c774e26a445, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.929844] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788408, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.937122] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788409, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10526} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.938236] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1553.938567] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1553.939825] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b79507a-44fe-4517-8df5-78d33df774fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.967178] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d/a01e772c-dafe-4091-bae6-f9f59d5c972d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1553.971334] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c1e14bf-7aca-4995-a899-31be29b74bc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.995436] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1553.995436] env[62816]: value = "task-1788411" [ 1553.995436] env[62816]: _type = "Task" [ 1553.995436] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.004190] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788411, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.089584] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788410, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.100641] env[62816]: DEBUG nova.policy [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da3883d2c9f34a7282d7fda19aa1f4f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '830fc28618ac4a31856cca469d46a750', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1554.161984] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1554.181637] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.182727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.182727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.182727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.182727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.186441] env[62816]: 
INFO nova.compute.manager [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Terminating instance [ 1554.191203] env[62816]: DEBUG nova.compute.manager [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1554.191203] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1554.192086] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee4d6b3c-ffb9-4f16-8336-0a0943da3227 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.203035] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1554.207781] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4b8486d9-c532-49c9-ab6e-bd5f9d812459 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.213366] env[62816]: DEBUG oslo_vmware.api [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1554.213366] env[62816]: value = "task-1788412" [ 1554.213366] env[62816]: _type = "Task" [ 1554.213366] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.224185] env[62816]: DEBUG oslo_vmware.api [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.268154] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f23be7-12b5-09d1-f600-8c774e26a445, 'name': SearchDatastore_Task, 'duration_secs': 0.012708} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.269060] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f915a86-9879-4c4c-b2eb-76940e2628fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.272092] env[62816]: DEBUG nova.network.neutron [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Updated VIF entry in instance network info cache for port 6203dfb1-2392-4ca7-bcbd-c68af134c40c. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.272504] env[62816]: DEBUG nova.network.neutron [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Updating instance_info_cache with network_info: [{"id": "6a0018bd-3abf-40af-978e-8bdd8a1e59ad", "address": "fa:16:3e:84:ea:fe", "network": {"id": "2556c13c-4384-4354-b52b-f146337d77c7", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1469143001", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4adc8ed0-d11a-4510-9be0-b27c0da3a903", "external-id": "nsx-vlan-transportzone-844", "segmentation_id": 844, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6a0018bd-3a", "ovs_interfaceid": "6a0018bd-3abf-40af-978e-8bdd8a1e59ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "address": "fa:16:3e:b6:0b:7e", "network": {"id": "abd7e484-d1ef-4023-9bb0-b1edeccfdd36", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1182060646", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6203dfb1-23", "ovs_interfaceid": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.283897] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d 
tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1554.283897] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a06084-092b-22a3-df98-b403e8b7935f" [ 1554.283897] env[62816]: _type = "Task" [ 1554.283897] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.294783] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a06084-092b-22a3-df98-b403e8b7935f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.424121] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788408, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.506088] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788411, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.594944] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788410, 'name': CreateVM_Task, 'duration_secs': 0.832015} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.594944] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1554.595684] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.595941] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.596300] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1554.597202] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ce5abb6-54bc-4890-b8ed-669adb36fbb5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.602018] env[62816]: DEBUG oslo_vmware.api [None 
req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1554.602018] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522eae9c-21f3-1df0-f81d-a2c3c0aca02d" [ 1554.602018] env[62816]: _type = "Task" [ 1554.602018] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.616336] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522eae9c-21f3-1df0-f81d-a2c3c0aca02d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.722976] env[62816]: DEBUG nova.network.neutron [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Updated VIF entry in instance network info cache for port fe2be693-eb9b-4f94-b238-992c67bfedda. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1554.723346] env[62816]: DEBUG nova.network.neutron [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Updating instance_info_cache with network_info: [{"id": "fe2be693-eb9b-4f94-b238-992c67bfedda", "address": "fa:16:3e:59:87:65", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe2be693-eb", "ovs_interfaceid": "fe2be693-eb9b-4f94-b238-992c67bfedda", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.731206] env[62816]: DEBUG oslo_vmware.api [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788412, 'name': PowerOffVM_Task, 'duration_secs': 0.281575} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.731515] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1554.731669] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1554.731899] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b8475698-8f8f-4db3-9707-1bba55c200a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.781359] env[62816]: DEBUG oslo_concurrency.lockutils [req-cb2fd035-2609-4b66-8f7b-6926e6d12aa9 req-0a253449-15b6-4b62-8c20-650086b97da4 service nova] Releasing lock "refresh_cache-83f7b5b8-228b-4d17-ab52-8df65fe247e3" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.785663] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26df0bdc-2407-4417-9de3-16f492e6905c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.800198] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c77456f-012c-4a41-85a8-4e4a550977be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.803927] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a06084-092b-22a3-df98-b403e8b7935f, 'name': SearchDatastore_Task, 'duration_secs': 0.014742} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.804407] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.804717] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1554.805403] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-919ab68c-81c4-4c3c-a803-cbfd48eca5a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.837959] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cedf22-ecf2-48bb-b694-2a6e1dee09d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.840841] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1554.840841] env[62816]: value = "task-1788414" [ 1554.840841] env[62816]: _type = "Task" [ 1554.840841] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.849411] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdd3fc8-d43e-49c6-add3-5dc25edd8812 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.860134] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788414, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.867564] env[62816]: DEBUG nova.compute.provider_tree [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1554.924713] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788408, 'name': CreateVM_Task, 'duration_secs': 1.159969} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.925649] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1554.926985] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.941272] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1554.941272] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1554.941397] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Deleting the datastore file [datastore1] 11a4d835-c149-49f0-8e4f-b3f9a7f1afca {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1554.941611] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bbfed58-4221-46d7-8610-ba3bd981505b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.949378] env[62816]: DEBUG oslo_vmware.api [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for the task: (returnval){ [ 1554.949378] env[62816]: value = "task-1788415" [ 1554.949378] env[62816]: _type = "Task" [ 1554.949378] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.960257] env[62816]: DEBUG oslo_vmware.api [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788415, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.005788] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788411, 'name': ReconfigVM_Task, 'duration_secs': 0.641383} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.006388] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Reconfigured VM instance instance-00000030 to attach disk [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d/a01e772c-dafe-4091-bae6-f9f59d5c972d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.006641] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=62816) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1555.007156] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-65e862b0-e6ed-4ac5-bb21-c82c87ebf5ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.015085] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1555.015085] env[62816]: value = "task-1788416" [ 1555.015085] env[62816]: _type = "Task" [ 1555.015085] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.026759] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788416, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.118505] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522eae9c-21f3-1df0-f81d-a2c3c0aca02d, 'name': SearchDatastore_Task, 'duration_secs': 0.049028} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.119627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.119627] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1555.119627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.119627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.119858] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1555.120055] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.120372] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1555.121275] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeb82ff0-7568-452d-99a4-4bdd9fffa5b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.122778] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4999027-de6a-40f0-b518-7fc59f2d11a4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1555.128671] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1555.128671] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5258ac3f-d41f-d5db-35f7-a1b32e146e18" [ 1555.128671] env[62816]: _type = "Task" [ 1555.128671] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.138216] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5258ac3f-d41f-d5db-35f7-a1b32e146e18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.139618] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1555.139819] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1555.140569] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96ba259c-6ec2-4ec0-a047-75930e0036d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.146796] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1555.146796] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cebd7f-1b2c-c504-2489-7dc8ce23fd75" [ 1555.146796] env[62816]: _type = "Task" [ 1555.146796] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.158551] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cebd7f-1b2c-c504-2489-7dc8ce23fd75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.176337] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1555.219814] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1555.220116] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1555.220719] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1555.220719] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1555.220719] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1555.220719] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1555.221267] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1555.221267] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1555.221267] env[62816]: DEBUG nova.virt.hardware [None 
req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1555.221672] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1555.221672] env[62816]: DEBUG nova.virt.hardware [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1555.222590] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18203542-bcd3-436f-809a-541b5bc3ea12 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.225985] env[62816]: DEBUG oslo_concurrency.lockutils [req-790a8f1f-d6ea-413e-8921-718de8ec8f81 req-64cf2be8-c60b-486c-b540-3174231173f3 service nova] Releasing lock "refresh_cache-049e1f97-ab58-4797-a084-f16a7a58e2cc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.234139] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6115c286-f224-4f2f-a3fc-fa995fde5c25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.324034] env[62816]: DEBUG nova.network.neutron [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Successfully created port: 5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1555.360523] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788414, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.373136] env[62816]: DEBUG nova.scheduler.client.report [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1555.461586] env[62816]: DEBUG oslo_vmware.api [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Task: {'id': task-1788415, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291019} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.461586] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1555.461956] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1555.464073] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1555.464527] env[62816]: INFO nova.compute.manager [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1555.465497] env[62816]: DEBUG oslo.service.loopingcall [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1555.467034] env[62816]: DEBUG nova.compute.manager [-] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1555.467034] env[62816]: DEBUG nova.network.neutron [-] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1555.502839] env[62816]: DEBUG nova.compute.manager [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Received event network-changed-0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1555.502839] env[62816]: DEBUG nova.compute.manager [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Refreshing instance network info cache due to event network-changed-0aebe84d-1c20-4011-90d2-8e7f579b4b29. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1555.502839] env[62816]: DEBUG oslo_concurrency.lockutils [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] Acquiring lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.503118] env[62816]: DEBUG oslo_concurrency.lockutils [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] Acquired lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.503118] env[62816]: DEBUG nova.network.neutron [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Refreshing network info cache for port 0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1555.526301] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788416, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.053497} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.526640] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=62816) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1555.527463] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e80006-ea06-41a9-828c-6334e43c2207 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.557757] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d/ephemeral_0.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1555.558472] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-981520d3-df27-48e1-bcd6-93ce38db6bd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.581293] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1555.581293] env[62816]: value = "task-1788417" [ 1555.581293] env[62816]: _type = "Task" [ 1555.581293] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.590350] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788417, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.638968] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5258ac3f-d41f-d5db-35f7-a1b32e146e18, 'name': SearchDatastore_Task, 'duration_secs': 0.072743} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.639581] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.639883] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1555.640166] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.656478] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cebd7f-1b2c-c504-2489-7dc8ce23fd75, 'name': SearchDatastore_Task, 'duration_secs': 0.063135} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.657552] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-093c82f2-93a0-468f-b2a1-8e8e7fc2ce97 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.664696] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1555.664696] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5293d4df-ee68-df03-b50e-2e8a97359610" [ 1555.664696] env[62816]: _type = "Task" [ 1555.664696] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.673917] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5293d4df-ee68-df03-b50e-2e8a97359610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.856020] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.646867} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.856020] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1555.856020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1555.856347] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e6e5b3f-028a-411f-8286-b36799155665 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.864012] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1555.864012] env[62816]: value = "task-1788418" [ 1555.864012] env[62816]: _type = "Task" [ 1555.864012] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.872466] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788418, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.880490] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.733s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.881047] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1555.884035] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.351s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1555.884130] env[62816]: DEBUG nova.objects.instance [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lazy-loading 'resources' on Instance uuid 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1556.100289] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788417, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.176049] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5293d4df-ee68-df03-b50e-2e8a97359610, 'name': SearchDatastore_Task, 'duration_secs': 0.009818} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.176360] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.177084] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 049e1f97-ab58-4797-a084-f16a7a58e2cc/049e1f97-ab58-4797-a084-f16a7a58e2cc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.177084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.177219] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1556.177332] 
env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da72b450-7f1a-4a33-8d4c-f7f7ba33a822 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.179581] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-959699a1-c8f8-4419-9000-8bb42051779f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.189167] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1556.189167] env[62816]: value = "task-1788419" [ 1556.189167] env[62816]: _type = "Task" [ 1556.189167] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.191097] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1556.191329] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1556.195164] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae1c75ca-6fe1-459f-8009-426f1649b689 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.206692] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1556.206692] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527795ff-14d4-eba5-00b0-b096e0b890a8" [ 1556.206692] env[62816]: _type = "Task" [ 1556.206692] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.207156] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788419, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.216370] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527795ff-14d4-eba5-00b0-b096e0b890a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009335} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.217345] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a17cbec1-a65f-472b-9953-a9d46d225852 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.222634] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1556.222634] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e58d84-f7b0-0794-b868-afdf9c467d25" [ 1556.222634] env[62816]: _type = "Task" [ 1556.222634] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.231027] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e58d84-f7b0-0794-b868-afdf9c467d25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.375951] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788418, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070345} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.377028] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1556.377367] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7dc90a1-d5a2-449e-8624-aeee90562c51 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.392951] env[62816]: DEBUG nova.compute.utils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1556.409054] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1556.409709] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1556.409709] env[62816]: DEBUG nova.network.neutron [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1556.414811] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f61d13dc-87bf-43e2-bb01-cad057bbcad8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.446599] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1556.446599] env[62816]: value = "task-1788420" [ 1556.446599] env[62816]: _type = "Task" [ 1556.446599] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.457681] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788420, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.512669] env[62816]: DEBUG nova.policy [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f28c6e00c66647a084da6dcb40697baf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6a4f555b56f4b1fae741a76b4140677', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1556.551585] env[62816]: DEBUG nova.network.neutron [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updated VIF entry in instance network info cache for port 0aebe84d-1c20-4011-90d2-8e7f579b4b29. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1556.551585] env[62816]: DEBUG nova.network.neutron [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updating instance_info_cache with network_info: [{"id": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "address": "fa:16:3e:a1:48:71", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.142", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0aebe84d-1c", "ovs_interfaceid": "0aebe84d-1c20-4011-90d2-8e7f579b4b29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.598580] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788417, 'name': ReconfigVM_Task, 'duration_secs': 0.862994} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.598940] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Reconfigured VM instance instance-00000030 to attach disk [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d/ephemeral_0.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1556.599674] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e1fc6b5-bfba-450e-88ca-8550c2c8560b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.608576] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1556.608576] env[62816]: value = "task-1788421" [ 1556.608576] env[62816]: _type = "Task" [ 1556.608576] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.619469] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788421, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.646903] env[62816]: DEBUG nova.network.neutron [-] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.701044] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788419, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.734431] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e58d84-f7b0-0794-b868-afdf9c467d25, 'name': SearchDatastore_Task, 'duration_secs': 0.008938} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.736794] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.737096] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 83f7b5b8-228b-4d17-ab52-8df65fe247e3/83f7b5b8-228b-4d17-ab52-8df65fe247e3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.737582] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0318874-f17e-4728-a69d-b33c91f45e96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.746200] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1556.746200] env[62816]: value = "task-1788422" [ 1556.746200] env[62816]: _type = "Task" [ 1556.746200] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.758473] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788422, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.908313] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1556.966056] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788420, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.990186] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ac72b7-5e66-4b81-b5ce-468aca1e1cb5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.006658] env[62816]: DEBUG nova.network.neutron [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Successfully created port: cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1557.009877] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a609c31-2cd3-49de-bea7-d313dab91d05 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.045903] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dc8040-b0bd-47a0-a10a-b7bc5b1bfafb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.054300] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc8543b-20c1-4382-9abf-2523446cf670 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.058249] env[62816]: DEBUG oslo_concurrency.lockutils [req-a04fe6a2-a666-45d1-9add-8bdab38e9ef2 req-de3e056d-3f79-45e2-a6ff-e66371627086 service nova] Releasing lock "refresh_cache-f9d9593a-1c25-47a1-98fd-4462a851f134" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.068646] env[62816]: DEBUG nova.compute.provider_tree [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.124102] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788421, 'name': Rename_Task, 'duration_secs': 0.390519} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.124512] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1557.124787] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee97b94a-d102-4332-b835-2b1eb5538850 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.135104] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1557.135104] env[62816]: value = "task-1788423" [ 1557.135104] env[62816]: _type = "Task" [ 1557.135104] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.146063] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788423, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.150044] env[62816]: INFO nova.compute.manager [-] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Took 1.68 seconds to deallocate network for instance. [ 1557.207516] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788419, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.256579] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788422, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.459720] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788420, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.576467] env[62816]: DEBUG nova.scheduler.client.report [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1557.645963] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788423, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.655167] env[62816]: DEBUG nova.network.neutron [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Successfully updated port: 5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1557.657976] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.703488] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788419, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.478951} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.703770] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 049e1f97-ab58-4797-a084-f16a7a58e2cc/049e1f97-ab58-4797-a084-f16a7a58e2cc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1557.703979] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1557.704598] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b442d822-d9e1-4e60-b9bc-b8dd38cac65f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.711325] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1557.711325] env[62816]: value = "task-1788424" [ 1557.711325] env[62816]: _type = "Task" [ 1557.711325] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.720362] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.760736] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788422, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.884714] env[62816]: DEBUG nova.compute.manager [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Received event network-vif-deleted-7b35c8f0-5f21-4920-93b4-f88823b815ab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1557.885877] env[62816]: DEBUG nova.compute.manager [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Received event network-vif-plugged-5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1557.885877] env[62816]: DEBUG oslo_concurrency.lockutils [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] Acquiring lock "ee543138-1c43-46c4-a512-1977fa5eb3c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.885877] env[62816]: DEBUG oslo_concurrency.lockutils [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.885877] env[62816]: DEBUG oslo_concurrency.lockutils [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.886350] env[62816]: DEBUG nova.compute.manager [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] No waiting events found dispatching network-vif-plugged-5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1557.886656] env[62816]: WARNING nova.compute.manager [req-a9c292f9-a688-45d2-9b72-e96a1e7352b4 req-0c64277b-2567-4d63-84de-b81c83f1ae44 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Received unexpected event network-vif-plugged-5f921b79-e02e-4aa2-b074-89b96a7890ff for instance with vm_state building and task_state spawning. [ 1557.919413] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1557.955469] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1557.955771] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1557.955929] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1557.956163] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1557.956346] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1557.956500] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1557.956770] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1557.956958] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1557.957151] env[62816]: DEBUG nova.virt.hardware [None 
req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1557.957366] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1557.957565] env[62816]: DEBUG nova.virt.hardware [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1557.958923] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8065451c-b348-4e9f-8fa4-5685208ddbab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.965692] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788420, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.973436] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356617db-8df3-45a0-bee2-65121fcc0604 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.082537] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.198s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.085249] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.069s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.085579] env[62816]: DEBUG nova.objects.instance [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lazy-loading 'resources' on Instance uuid 48b74d52-e764-4d14-b372-fc34872205dd {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1558.115186] env[62816]: INFO nova.scheduler.client.report [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Deleted allocations for instance 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1 [ 1558.145777] env[62816]: DEBUG oslo_vmware.api [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 
tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788423, 'name': PowerOnVM_Task, 'duration_secs': 0.943182} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.146072] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1558.146282] env[62816]: INFO nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Took 10.37 seconds to spawn the instance on the hypervisor. [ 1558.146465] env[62816]: DEBUG nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1558.147367] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265503d8-b58b-4ec3-951e-2e547da3934c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.158254] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.158426] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.158540] env[62816]: DEBUG nova.network.neutron [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1558.223785] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071132} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.224069] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.224886] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767ea0e9-4256-4017-a427-4e0361d63774 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.252015] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 049e1f97-ab58-4797-a084-f16a7a58e2cc/049e1f97-ab58-4797-a084-f16a7a58e2cc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.252853] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4abb2958-fafa-4035-a9b2-13725fbf0a50 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.278985] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788422, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.343933} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.278985] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 83f7b5b8-228b-4d17-ab52-8df65fe247e3/83f7b5b8-228b-4d17-ab52-8df65fe247e3.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1558.278985] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1558.278985] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1558.278985] env[62816]: value = "task-1788425" [ 1558.278985] env[62816]: _type = "Task" [ 1558.278985] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.278985] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd76ab91-f326-4a1d-a1aa-8d851d417923 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.292832] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788425, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.294334] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1558.294334] env[62816]: value = "task-1788426" [ 1558.294334] env[62816]: _type = "Task" [ 1558.294334] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.458409] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788420, 'name': ReconfigVM_Task, 'duration_secs': 1.69019} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.458409] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1558.459163] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6395dd90-fa3b-4a9b-aa81-4d0ffb12707b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.469061] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1558.469061] env[62816]: value = "task-1788427" [ 1558.469061] env[62816]: _type = "Task" [ 1558.469061] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.475671] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788427, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.623881] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912d0896-65ce-4525-ace7-e7f4488485cb tempest-ServersV294TestFqdnHostnames-1201908949 tempest-ServersV294TestFqdnHostnames-1201908949-project-member] Lock "1e3f720c-5a6f-4e7c-aafc-b4680d9667e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.811s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.674542] env[62816]: INFO nova.compute.manager [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Took 42.84 seconds to build instance. [ 1558.749801] env[62816]: DEBUG nova.network.neutron [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.793031] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788425, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.807043] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788426, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071076} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.807339] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.808129] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b33bf14-cdca-4cdd-827b-046423fca157 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.837477] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 83f7b5b8-228b-4d17-ab52-8df65fe247e3/83f7b5b8-228b-4d17-ab52-8df65fe247e3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.838268] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29a48b33-d213-4821-acb7-6549613e09cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.864820] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1558.864820] env[62816]: value = "task-1788428" [ 1558.864820] env[62816]: _type = "Task" [ 1558.864820] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.873022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.873022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.873022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.873022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.873022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.876186] env[62816]: INFO nova.compute.manager [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Terminating instance [ 1558.881531] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.882268] env[62816]: DEBUG nova.compute.manager [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1558.882873] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1558.884206] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575f6299-ef69-4d81-8748-867e247ebc1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.893930] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.896744] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a2fe22f-962b-49cb-ae74-f4a618c6b4aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.904841] env[62816]: DEBUG oslo_vmware.api [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1558.904841] env[62816]: value = "task-1788429" [ 1558.904841] env[62816]: _type = "Task" [ 1558.904841] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.916381] env[62816]: DEBUG oslo_vmware.api [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788429, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.981975] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788427, 'name': Rename_Task, 'duration_secs': 0.348358} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.982258] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1558.982551] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8607ef3-a699-470e-a979-09869232aa1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.994027] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1558.994027] env[62816]: value = "task-1788430" [ 1558.994027] env[62816]: _type = "Task" [ 1558.994027] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.007962] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788430, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.080812] env[62816]: DEBUG nova.network.neutron [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updating instance_info_cache with network_info: [{"id": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "address": "fa:16:3e:7f:ac:ca", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f921b79-e0", "ovs_interfaceid": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.179679] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67cd99d1-2dc7-430c-8a73-f6433f13564e tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.454s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1559.218302] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578198c1-ea87-45a5-baeb-17781d6d75e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.225835] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c96cfc3-cdb7-47fb-ad9d-daa6aa52d0d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.256582] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fec7664-75c6-4999-bbb3-ee333b971cc9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.266251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8a266c-be9b-4d5e-bc9c-f916ac220aa9 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.280703] env[62816]: DEBUG nova.compute.provider_tree [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1559.283486] env[62816]: DEBUG nova.network.neutron [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Successfully updated port: cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1559.300674] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788425, 'name': ReconfigVM_Task, 'duration_secs': 0.667765} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.301999] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 049e1f97-ab58-4797-a084-f16a7a58e2cc/049e1f97-ab58-4797-a084-f16a7a58e2cc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.302710] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09ca5fc3-7ce3-4c0a-82e3-1e01f943e897 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.310255] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1559.310255] env[62816]: value = "task-1788431" [ 1559.310255] env[62816]: _type = "Task" [ 1559.310255] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.319402] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788431, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.377236] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788428, 'name': ReconfigVM_Task, 'duration_secs': 0.363107} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.377661] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 83f7b5b8-228b-4d17-ab52-8df65fe247e3/83f7b5b8-228b-4d17-ab52-8df65fe247e3.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.378564] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68cc1b1d-3876-49fe-9887-effd9e73cd74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.385523] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1559.385523] env[62816]: value = "task-1788432" [ 1559.385523] env[62816]: _type = "Task" [ 1559.385523] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.397313] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788432, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.416835] env[62816]: DEBUG oslo_vmware.api [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788429, 'name': PowerOffVM_Task, 'duration_secs': 0.312418} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.416835] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1559.416835] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1559.416835] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98113951-3f00-45bb-a7ad-b6e978941f8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.500868] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1559.501063] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1559.501252] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Deleting the datastore file [datastore1] 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1559.501885] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91e7b04b-7585-400a-84d0-34e537c98cb7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.509201] env[62816]: DEBUG oslo_vmware.api [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788430, 'name': PowerOnVM_Task, 'duration_secs': 0.457777} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.510010] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1559.510436] env[62816]: INFO nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Took 6.93 seconds to spawn the instance on the hypervisor. 
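The surrounding records repeat one pattern: a vCenter task is invoked (Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task) and the driver then polls it until it finishes, logging "progress is N%" until a "completed successfully" line appears. A minimal sketch of that poll-until-done loop follows; it is illustrative only, not the oslo.vmware implementation, and the names poll_task_until_done, get_task_info and TaskFailed are hypothetical stand-ins.

    import time

    class TaskFailed(Exception):
        """Raised when the remote task ends in an error state."""

    def get_task_info(task_ref):
        # Hypothetical stand-in for a vSphere call that returns the task's
        # current state ('running' | 'success' | 'error') and its progress.
        raise NotImplementedError

    def poll_task_until_done(task_ref, interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or the timeout expires."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info(task_ref)   # e.g. {'state': 'running', 'progress': 66}
            if info['state'] == 'success':
                return info                  # the "... completed successfully." case
            if info['state'] == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            if time.monotonic() > deadline:
                raise TimeoutError(f'task {task_ref} did not finish within {timeout}s')
            # corresponds to the "... progress is N%." lines between invocation and completion
            print(f'task {task_ref} progress is {info.get("progress", 0)}%')
            time.sleep(interval)

The interval and timeout values above are arbitrary placeholders; the actual polling cadence is whatever oslo.vmware's api.py applies when wait_for_task emits the progress records seen here.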
[ 1559.510647] env[62816]: DEBUG nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1559.511646] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a8835b1-3b0a-40e5-8b2d-1bc2bc7807b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.516914] env[62816]: DEBUG oslo_vmware.api [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1559.516914] env[62816]: value = "task-1788434" [ 1559.516914] env[62816]: _type = "Task" [ 1559.516914] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.533446] env[62816]: DEBUG oslo_vmware.api [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788434, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.591522] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.591522] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Instance network_info: |[{"id": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "address": "fa:16:3e:7f:ac:ca", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f921b79-e0", "ovs_interfaceid": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1559.591522] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: 
ee543138-1c43-46c4-a512-1977fa5eb3c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:ac:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '001929c7-0dc4-4b73-a9f1-d672f8377985', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f921b79-e02e-4aa2-b074-89b96a7890ff', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1559.598842] env[62816]: DEBUG oslo.service.loopingcall [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1559.599457] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1559.599952] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-26194a00-f5d9-4bf6-9d8f-f6d442253699 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.625180] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1559.625180] env[62816]: value = "task-1788435" [ 1559.625180] env[62816]: _type = "Task" [ 1559.625180] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.633245] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788435, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.790725] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.790919] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquired lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.791114] env[62816]: DEBUG nova.network.neutron [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1559.808794] env[62816]: ERROR nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [req-21c17055-7aaa-46f6-8cf0-e0f2305791cd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 
1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-21c17055-7aaa-46f6-8cf0-e0f2305791cd"}]} [ 1559.822118] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788431, 'name': Rename_Task, 'duration_secs': 0.171259} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.822452] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.822738] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91fc9954-0a32-4284-b201-d37a832c8b98 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.830289] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1559.830289] env[62816]: value = "task-1788436" [ 1559.830289] env[62816]: _type = "Task" [ 1559.830289] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.835545] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1559.842712] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788436, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.859964] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1559.859964] env[62816]: DEBUG nova.compute.provider_tree [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1559.876068] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1559.899183] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788432, 'name': Rename_Task, 'duration_secs': 0.184688} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.900639] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1559.903146] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.904279] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec115304-34fb-4127-b3a1-39d0971668e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.910681] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1559.910681] env[62816]: value = "task-1788437" [ 1559.910681] env[62816]: _type = "Task" [ 1559.910681] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.919177] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788437, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.037731] env[62816]: DEBUG oslo_vmware.api [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169681} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.040113] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1560.040745] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1560.040986] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1560.041186] env[62816]: INFO nova.compute.manager [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1560.041806] env[62816]: DEBUG oslo.service.loopingcall [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1560.042888] env[62816]: INFO nova.compute.manager [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Took 29.59 seconds to build instance. [ 1560.043101] env[62816]: DEBUG nova.compute.manager [-] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1560.043205] env[62816]: DEBUG nova.network.neutron [-] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1560.137579] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788435, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.344056] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788436, 'name': PowerOnVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.368368] env[62816]: DEBUG nova.network.neutron [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1560.420601] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788437, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.428218] env[62816]: DEBUG nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Received event network-changed-5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1560.428218] env[62816]: DEBUG nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Refreshing instance network info cache due to event network-changed-5f921b79-e02e-4aa2-b074-89b96a7890ff. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1560.428218] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Acquiring lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.428218] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Acquired lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.428218] env[62816]: DEBUG nova.network.neutron [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Refreshing network info cache for port 5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1560.431398] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa36dd7-1c9e-4ccd-bfcc-6abe067102a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.442093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4266e00-50ce-45ed-875b-65c9232a8431 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.479818] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fac3282-24ad-4dd1-8978-bc14876fc21e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.494200] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf1c79-3ae6-4eb9-b941-7fa60dba5706 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.511137] env[62816]: DEBUG nova.compute.provider_tree [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Updating inventory in ProviderTree for provider 
27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1560.546620] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fa31f97e-51ed-49ff-8c36-53c22d231b0d tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.251s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.636874] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788435, 'name': CreateVM_Task, 'duration_secs': 0.641675} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.637210] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1560.637752] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.637968] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.638253] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1560.638835] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a0bb7fe-f4e7-48c3-b9c5-2b9a6d8aa033 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.645318] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1560.645318] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521858b9-deea-efee-be74-6876bad2f026" [ 1560.645318] env[62816]: _type = "Task" [ 1560.645318] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.653494] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521858b9-deea-efee-be74-6876bad2f026, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.748597] env[62816]: DEBUG nova.network.neutron [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updating instance_info_cache with network_info: [{"id": "cb0d8306-4954-4597-b857-f3410e8e30d8", "address": "fa:16:3e:85:21:f5", "network": {"id": "b41cf939-d61d-47cd-a21d-4effc220b206", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-127719284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6a4f555b56f4b1fae741a76b4140677", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb0d8306-49", "ovs_interfaceid": "cb0d8306-4954-4597-b857-f3410e8e30d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1560.841904] env[62816]: DEBUG oslo_vmware.api [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788436, 'name': PowerOnVM_Task, 'duration_secs': 0.598164} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.842477] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.842744] env[62816]: INFO nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Took 10.95 seconds to spawn the instance on the hypervisor. 
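Annotation: the "Failed to update inventory ... Got 409 ... placement.concurrent_update" errors followed by "Refreshing inventories" show Placement's optimistic concurrency control: every inventory PUT carries the resource provider generation, and a stale generation is rejected with 409 so the client re-reads the provider and retries. Below is a rough sketch of that retry loop against the Placement inventories endpoint; authentication, microversion headers, and full error handling are omitted, the endpoint URL is illustrative, and `session` is assumed to be a pre-authenticated requests.Session.

    import requests

    PLACEMENT = "http://placement.example/placement"  # illustrative endpoint

    def put_inventory_with_retry(session: requests.Session, rp_uuid: str,
                                 inventories: dict, max_attempts: int = 4) -> dict:
        """PUT an inventory dict, refreshing the provider generation on 409.

        `inventories` has the same shape as in the log above, e.g.
        {'VCPU': {'total': 48, 'allocation_ratio': 4.0, ...}, ...}
        """
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories"
        for _ in range(max_attempts):
            # Read the current generation; a concurrent writer may have bumped it.
            current = session.get(url).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = session.put(url, json=payload)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 with code 'placement.concurrent_update': another writer won,
            # so loop and retry with the freshly read generation.
        raise RuntimeError(f"gave up after {max_attempts} generation conflicts")

This is why the 409 in the log is not fatal: the scheduler report client simply refreshes inventories, aggregates, and traits (the "Refreshing ..." lines that follow) and tries again.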
[ 1560.842966] env[62816]: DEBUG nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1560.844138] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90cfa8d-eaa2-4695-a5d7-1c19da4821ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.920428] env[62816]: DEBUG oslo_vmware.api [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788437, 'name': PowerOnVM_Task, 'duration_secs': 0.560374} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.920689] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.920792] env[62816]: INFO nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Took 16.30 seconds to spawn the instance on the hypervisor. [ 1560.920965] env[62816]: DEBUG nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1560.921893] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3842c1-3c3c-4495-acf6-050e48a07dbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.032714] env[62816]: ERROR nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] [req-aff4b88d-78c3-4e61-9e9f-b70f48a17d07] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-aff4b88d-78c3-4e61-9e9f-b70f48a17d07"}]} [ 1561.053186] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1561.058354] env[62816]: DEBUG nova.network.neutron [-] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.074583] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1561.074842] env[62816]: DEBUG nova.compute.provider_tree [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1561.101426] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1561.132858] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1561.163410] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': 
session[52166549-a417-fee9-199e-38636bfc0ddd]521858b9-deea-efee-be74-6876bad2f026, 'name': SearchDatastore_Task, 'duration_secs': 0.008807} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.163727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.164051] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1561.165673] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.165673] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.165673] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1561.168618] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7420501a-5a54-4ed8-8538-9828770813f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.182891] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1561.183354] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1561.184324] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f2eeb45-810b-4980-84fd-9566b64ec6cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.190883] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1561.190883] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5234b43b-c561-7eda-329a-81897b9c6af9" [ 1561.190883] env[62816]: _type = "Task" [ 1561.190883] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.206536] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5234b43b-c561-7eda-329a-81897b9c6af9, 'name': SearchDatastore_Task, 'duration_secs': 0.009274} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.211917] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f488aec0-6b6b-4e33-9ee1-a5212959ceaa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.218688] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1561.218688] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a5b444-31fa-98e8-bf80-cbc7dc29281e" [ 1561.218688] env[62816]: _type = "Task" [ 1561.218688] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.233807] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a5b444-31fa-98e8-bf80-cbc7dc29281e, 'name': SearchDatastore_Task, 'duration_secs': 0.008955} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.233807] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.233807] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/ee543138-1c43-46c4-a512-1977fa5eb3c6.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1561.233807] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13e90eff-50db-4769-8d13-7f136d587997 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.244988] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1561.244988] env[62816]: value = "task-1788438" [ 1561.244988] env[62816]: _type = "Task" [ 1561.244988] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.251896] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Releasing lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.251896] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Instance network_info: |[{"id": "cb0d8306-4954-4597-b857-f3410e8e30d8", "address": "fa:16:3e:85:21:f5", "network": {"id": "b41cf939-d61d-47cd-a21d-4effc220b206", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-127719284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6a4f555b56f4b1fae741a76b4140677", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb0d8306-49", "ovs_interfaceid": "cb0d8306-4954-4597-b857-f3410e8e30d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1561.252045] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:21:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'df1bf911-aac9-4d2d-ae69-66ace3e6a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cb0d8306-4954-4597-b857-f3410e8e30d8', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1561.262381] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Creating folder: Project (c6a4f555b56f4b1fae741a76b4140677). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1561.267472] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da8373c8-3956-4d80-bd60-5c7fa39b381a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.275386] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788438, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.275860] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.276020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.287994] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Created folder: Project (c6a4f555b56f4b1fae741a76b4140677) in parent group-v370905. [ 1561.287994] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Creating folder: Instances. Parent ref: group-v371053. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1561.287994] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2a9e246-340d-401a-b0a4-6eb954c66926 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.290241] env[62816]: DEBUG nova.network.neutron [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updated VIF entry in instance network info cache for port 5f921b79-e02e-4aa2-b074-89b96a7890ff. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1561.290241] env[62816]: DEBUG nova.network.neutron [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updating instance_info_cache with network_info: [{"id": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "address": "fa:16:3e:7f:ac:ca", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f921b79-e0", "ovs_interfaceid": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1561.309220] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Created folder: Instances in parent group-v371053. [ 1561.309220] env[62816]: DEBUG oslo.service.loopingcall [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1561.309220] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1561.309220] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-315dba4d-982e-4e5b-8e51-8480b7624d27 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.327386] env[62816]: DEBUG nova.compute.manager [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Received event network-changed-bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1561.327568] env[62816]: DEBUG nova.compute.manager [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Refreshing instance network info cache due to event network-changed-bf038e03-93db-4837-8a8e-6b876acd1b7c. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1561.327776] env[62816]: DEBUG oslo_concurrency.lockutils [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] Acquiring lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.327912] env[62816]: DEBUG oslo_concurrency.lockutils [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] Acquired lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.328074] env[62816]: DEBUG nova.network.neutron [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Refreshing network info cache for port bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.335494] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1561.335494] env[62816]: value = "task-1788441" [ 1561.335494] env[62816]: _type = "Task" [ 1561.335494] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.348994] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788441, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.369524] env[62816]: INFO nova.compute.manager [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Took 38.41 seconds to build instance. [ 1561.445202] env[62816]: INFO nova.compute.manager [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Took 49.85 seconds to build instance. 
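Annotation: the recurring 'Acquiring lock "refresh_cache-<uuid>"' / 'Acquired lock' / 'Releasing lock' triples in this section come from oslo.concurrency's named locks, which serialize work on a single instance's network-info cache while external events (network-changed, network-vif-plugged) arrive concurrently. A minimal sketch of the same pattern with lockutils follows; the cache-refresh body is a stand-in, not Nova's code.

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid: str, fetch_network_info) -> dict:
        """Serialize cache refreshes per instance with a named lock.

        Each 'Acquiring lock "refresh_cache-<uuid>"' / '"released"' pair in
        the log corresponds to entering and leaving a block like this one.
        """
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            # Only one worker per instance gets past this point at a time,
            # so concurrent event handlers cannot interleave cache writes.
            network_info = fetch_network_info(instance_uuid)
            return {"uuid": instance_uuid, "network_info": network_info}

The "waited 0.000s" and "held N.NNNs" figures in the log are the wait and hold times that lockutils reports for exactly this kind of critical section.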
[ 1561.561137] env[62816]: INFO nova.compute.manager [-] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Took 1.52 seconds to deallocate network for instance. [ 1561.687911] env[62816]: INFO nova.compute.manager [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Rebuilding instance [ 1561.738276] env[62816]: DEBUG nova.compute.manager [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1561.739363] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ba74ac-4869-44c9-b17d-ce8730ab405b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.745463] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cfde8b-c4bd-48c8-a36f-1bb4d01bbf84 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.766541] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42471f-a5e4-495c-a555-2f406c0cee5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.770583] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788438, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466353} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.770860] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/ee543138-1c43-46c4-a512-1977fa5eb3c6.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1561.771187] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1561.771683] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b8ca60f-ff94-4bdc-bf32-1cefff59204c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.802276] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1561.805965] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Releasing lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1561.805965] env[62816]: DEBUG nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Received event network-vif-plugged-cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1561.805965] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Acquiring lock "31ac8296-14fa-46f7-b825-c31904b832d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.806101] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Lock "31ac8296-14fa-46f7-b825-c31904b832d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.806284] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Lock "31ac8296-14fa-46f7-b825-c31904b832d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.806444] env[62816]: DEBUG nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] No waiting events found dispatching network-vif-plugged-cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1561.806660] env[62816]: WARNING nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Received unexpected event network-vif-plugged-cb0d8306-4954-4597-b857-f3410e8e30d8 for instance with vm_state building and task_state spawning. [ 1561.806901] env[62816]: DEBUG nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Received event network-changed-cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1561.806999] env[62816]: DEBUG nova.compute.manager [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Refreshing instance network info cache due to event network-changed-cb0d8306-4954-4597-b857-f3410e8e30d8. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1561.807200] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Acquiring lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.807641] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Acquired lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.807641] env[62816]: DEBUG nova.network.neutron [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Refreshing network info cache for port cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1561.811791] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f8d51c-8733-4281-bc7c-b82401a60d98 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.814668] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1561.814668] env[62816]: value = "task-1788442" [ 1561.814668] env[62816]: _type = "Task" [ 1561.814668] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.823228] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2fd371-4621-4fd9-bb9c-cc2ffeb13c7d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.830536] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788442, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.841122] env[62816]: DEBUG nova.compute.provider_tree [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.851955] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788441, 'name': CreateVM_Task, 'duration_secs': 0.382107} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.852195] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1561.852881] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1561.852987] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1561.853426] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1561.853608] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8dfe9b48-8353-48c3-93dd-97e7c33e0380 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.859741] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1561.859741] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52169088-4057-ceea-49a7-5e24d4b8e46d" [ 1561.859741] env[62816]: _type = "Task" [ 1561.859741] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.867898] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52169088-4057-ceea-49a7-5e24d4b8e46d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.874835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77193975-d4d4-4fd1-b0a6-caf4984083e4 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.775s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.946716] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8508c126-6bf5-47a2-856e-5ae1ae887028 tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.543s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.069151] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.216161] env[62816]: DEBUG nova.network.neutron [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updated VIF entry in instance network info cache for port bf038e03-93db-4837-8a8e-6b876acd1b7c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.216864] env[62816]: DEBUG nova.network.neutron [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updating instance_info_cache with network_info: [{"id": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "address": "fa:16:3e:81:17:26", "network": {"id": "3f767f41-19c5-48e4-9495-7d58f14aea5f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-333332138-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "934fdecf54c6435999885451fc2204ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbf038e03-93", "ovs_interfaceid": "bf038e03-93db-4837-8a8e-6b876acd1b7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.261625] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1562.261952] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfc90665-32c8-4369-96e6-5c07c8877614 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.277381] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1562.277381] env[62816]: value = "task-1788443" [ 1562.277381] env[62816]: _type = "Task" [ 1562.277381] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.291305] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788443, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.331988] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788442, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065742} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.332416] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1562.333339] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9188ff7-9bd6-43a4-9422-46f9f02f04fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.353295] env[62816]: DEBUG nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1562.362657] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/ee543138-1c43-46c4-a512-1977fa5eb3c6.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1562.364860] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.367702] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f2a16e9-6679-4b07-ba5a-b9353e29e3af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.393062] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52169088-4057-ceea-49a7-5e24d4b8e46d, 'name': SearchDatastore_Task, 'duration_secs': 0.009043} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.393841] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.393841] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1562.394029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.394180] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.394351] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1562.394740] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1562.394740] env[62816]: value = "task-1788444" [ 1562.394740] env[62816]: _type = "Task" [ 1562.394740] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.395173] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4ed640d3-04d2-47f5-955d-e992b7ca587b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.407784] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788444, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.409116] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1562.409317] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1562.410160] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-886e8de2-4389-41c1-86da-b6490e0659f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.416277] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1562.416277] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cdaa85-a86a-f306-65f1-6ed19aeb4cbd" [ 1562.416277] env[62816]: _type = "Task" [ 1562.416277] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.424639] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cdaa85-a86a-f306-65f1-6ed19aeb4cbd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.499821] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edda32e-5adf-407b-8c25-4cb08e08c739 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.507501] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Suspending the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1562.509880] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-19b12a27-c651-4135-92f7-feb96167895d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.515791] env[62816]: DEBUG oslo_vmware.api [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] Waiting for the task: (returnval){ [ 1562.515791] env[62816]: value = "task-1788445" [ 1562.515791] env[62816]: _type = "Task" [ 1562.515791] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.525640] env[62816]: DEBUG oslo_vmware.api [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] Task: {'id': task-1788445, 'name': SuspendVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.656089] env[62816]: DEBUG nova.network.neutron [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updated VIF entry in instance network info cache for port cb0d8306-4954-4597-b857-f3410e8e30d8. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1562.656755] env[62816]: DEBUG nova.network.neutron [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updating instance_info_cache with network_info: [{"id": "cb0d8306-4954-4597-b857-f3410e8e30d8", "address": "fa:16:3e:85:21:f5", "network": {"id": "b41cf939-d61d-47cd-a21d-4effc220b206", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-127719284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6a4f555b56f4b1fae741a76b4140677", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb0d8306-49", "ovs_interfaceid": "cb0d8306-4954-4597-b857-f3410e8e30d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1562.723015] env[62816]: DEBUG oslo_concurrency.lockutils [req-e85e8279-93e5-4c30-92f9-1731e9bf597a req-8bf413ae-bacb-42ac-bec7-da57bd68435a service nova] Releasing lock "refresh_cache-a01e772c-dafe-4091-bae6-f9f59d5c972d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1562.774556] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.774791] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.776441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.776441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.776441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.782113] env[62816]: INFO nova.compute.manager [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Terminating instance [ 1562.784632] env[62816]: DEBUG nova.compute.manager [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1562.785520] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1562.785874] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c56aac-ab12-4cd6-a4e2-24ae96d1e962 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.795371] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788443, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.799869] env[62816]: DEBUG nova.compute.manager [req-6c35d272-70d3-41e2-b69b-92c324d086a3 req-2d7ad5d8-3bbf-4b02-b904-457fc5c6b4c9 service nova] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Received event network-vif-deleted-9f0023d4-6a62-4c6b-862d-83c21341da28 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1562.803226] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1562.803474] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-790e7eee-2c0e-4683-9002-4e6316e6d7c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.811686] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1562.811686] env[62816]: value = "task-1788446" [ 1562.811686] env[62816]: _type = "Task" [ 1562.811686] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.821180] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788446, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.865414] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.780s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.868255] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.152s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.868255] env[62816]: DEBUG nova.objects.instance [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lazy-loading 'resources' on Instance uuid 42093232-a4e5-4cc3-ab1c-a0023a91e102 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1562.893362] env[62816]: INFO nova.scheduler.client.report [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Deleted allocations for instance 48b74d52-e764-4d14-b372-fc34872205dd [ 1562.908257] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788444, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.926835] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cdaa85-a86a-f306-65f1-6ed19aeb4cbd, 'name': SearchDatastore_Task, 'duration_secs': 0.010418} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.927506] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e33cf5d3-7435-4b70-be85-24d199a72831 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.933889] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1562.933889] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526aa267-ad13-274e-a54d-3f0cad71b051" [ 1562.933889] env[62816]: _type = "Task" [ 1562.933889] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.944806] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526aa267-ad13-274e-a54d-3f0cad71b051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.026446] env[62816]: DEBUG oslo_vmware.api [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] Task: {'id': task-1788445, 'name': SuspendVM_Task} progress is 83%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.159821] env[62816]: DEBUG oslo_concurrency.lockutils [req-6ec29384-00e9-4c2a-ba6c-0511d8a68783 req-82af5cd6-8fad-4e9c-b165-7d2a7c92e825 service nova] Releasing lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.291164] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788443, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.326066] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788446, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.407787] env[62816]: DEBUG oslo_concurrency.lockutils [None req-39dd2e32-4db0-4292-ba6b-c9b601c30961 tempest-ServerTagsTestJSON-764429377 tempest-ServerTagsTestJSON-764429377-project-member] Lock "48b74d52-e764-4d14-b372-fc34872205dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.642s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.419745] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788444, 'name': ReconfigVM_Task, 'duration_secs': 0.728217} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.420644] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Reconfigured VM instance instance-00000033 to attach disk [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/ee543138-1c43-46c4-a512-1977fa5eb3c6.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1563.420844] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9295a831-bd1a-43c3-b5ce-89a8674a21f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.428022] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1563.428022] env[62816]: value = "task-1788447" [ 1563.428022] env[62816]: _type = "Task" [ 1563.428022] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.452231] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788447, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.452231] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526aa267-ad13-274e-a54d-3f0cad71b051, 'name': SearchDatastore_Task, 'duration_secs': 0.018241} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.452640] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.453447] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 31ac8296-14fa-46f7-b825-c31904b832d5/31ac8296-14fa-46f7-b825-c31904b832d5.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1563.453447] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4dda840b-16ac-43b0-bc72-294f8c562404 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.460325] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1563.460325] env[62816]: value = "task-1788448" [ 1563.460325] env[62816]: _type = "Task" [ 1563.460325] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.468558] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.529157] env[62816]: DEBUG oslo_vmware.api [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] Task: {'id': task-1788445, 'name': SuspendVM_Task, 'duration_secs': 0.915599} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.529913] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Suspended the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1563.529913] env[62816]: DEBUG nova.compute.manager [None req-f60c7891-4526-4a72-af77-c3dcdf48c177 tempest-ServersAdminNegativeTestJSON-1638612245 tempest-ServersAdminNegativeTestJSON-1638612245-project-admin] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1563.530421] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648009f8-2363-4458-b6ed-962ee66486a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.796501] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788443, 'name': PowerOffVM_Task, 'duration_secs': 1.149408} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.796796] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1563.797035] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1563.797861] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bc7362-1748-43c4-ae3e-4f97c14e55a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.805435] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1563.805749] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4e694b2-83cf-4320-af17-9cb961280eb9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.824313] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788446, 'name': PowerOffVM_Task, 'duration_secs': 0.621207} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.828056] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1563.828304] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1563.828806] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db3ec6fb-c60d-4bc2-aee8-9b78696a715d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.834473] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1563.834644] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1563.835051] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Deleting the datastore file [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1563.835155] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68fe18b7-19c2-4867-86e0-d76a72be7850 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.842395] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1563.842395] env[62816]: value = "task-1788451" [ 1563.842395] env[62816]: _type = "Task" [ 1563.842395] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.854678] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788451, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.863246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937c3b17-aaf2-4516-90b0-7af90ccfbbb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.871459] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0deb9da-5942-40a4-b2d8-28c511c4a4ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.915170] env[62816]: DEBUG oslo_concurrency.lockutils [None req-564b156d-3c38-4b88-9ff2-726efd969bb5 tempest-ServersListShow296Test-56642911 tempest-ServersListShow296Test-56642911-project-member] Acquiring lock "a73e3b40-34e4-427f-b34b-ac2d3d233565" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1563.915416] env[62816]: DEBUG oslo_concurrency.lockutils [None req-564b156d-3c38-4b88-9ff2-726efd969bb5 tempest-ServersListShow296Test-56642911 tempest-ServersListShow296Test-56642911-project-member] Lock "a73e3b40-34e4-427f-b34b-ac2d3d233565" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.916978] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250d0e6f-1ed4-4476-bac5-bee73ee2691e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.925527] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b298eeda-5f02-4829-ab51-8c3507d55785 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.943974] env[62816]: DEBUG nova.compute.provider_tree [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1563.952056] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788447, 'name': Rename_Task, 'duration_secs': 0.159325} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.952848] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.953516] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-574d7227-5aca-4639-a233-2159e535540e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.963465] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1563.963465] env[62816]: value = "task-1788452" [ 1563.963465] env[62816]: _type = "Task" [ 1563.963465] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.976430] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788452, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.980155] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454697} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.980433] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 31ac8296-14fa-46f7-b825-c31904b832d5/31ac8296-14fa-46f7-b825-c31904b832d5.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1563.980741] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1563.980890] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-76cee8a1-d88b-4a96-addd-626b3a35c28a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.987614] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1563.987614] env[62816]: value = "task-1788453" [ 1563.987614] env[62816]: _type = "Task" [ 1563.987614] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.997365] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788453, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.059420] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1564.059420] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1564.059420] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Deleting the datastore file [datastore1] 83f7b5b8-228b-4d17-ab52-8df65fe247e3 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1564.059420] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebd7b716-bbae-43cf-8437-cf5030a2daa5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.067114] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for the task: (returnval){ [ 1564.067114] env[62816]: value = "task-1788454" [ 1564.067114] env[62816]: _type = "Task" [ 1564.067114] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.076278] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788454, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.358735] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788451, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155287} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.359092] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1564.359308] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1564.359492] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1564.420568] env[62816]: DEBUG nova.compute.manager [None req-564b156d-3c38-4b88-9ff2-726efd969bb5 tempest-ServersListShow296Test-56642911 tempest-ServersListShow296Test-56642911-project-member] [instance: a73e3b40-34e4-427f-b34b-ac2d3d233565] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1564.472514] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788452, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.473674] env[62816]: ERROR nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] [req-0558fbb9-26ef-4a04-8c3f-5b20bc89ce1d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 159, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0558fbb9-26ef-4a04-8c3f-5b20bc89ce1d"}]} [ 1564.494411] env[62816]: DEBUG nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1564.501323] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788453, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.136753} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.501323] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1564.501726] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-725dbf38-8361-4668-8a77-d46ea36f9b89 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.535341] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 31ac8296-14fa-46f7-b825-c31904b832d5/31ac8296-14fa-46f7-b825-c31904b832d5.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1564.536572] env[62816]: DEBUG nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1564.536802] env[62816]: DEBUG nova.compute.provider_tree [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1564.539087] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43d7f721-3dbd-4553-826e-0433e45a42f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.560533] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1564.560533] env[62816]: value = "task-1788455" [ 1564.560533] env[62816]: _type = "Task" [ 1564.560533] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.568874] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.570269] env[62816]: DEBUG nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1564.581038] env[62816]: DEBUG oslo_vmware.api [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Task: {'id': task-1788454, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143077} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.583863] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1564.583863] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1564.583863] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1564.583863] env[62816]: INFO nova.compute.manager [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Took 1.80 seconds to destroy the instance on the hypervisor. [ 1564.583863] env[62816]: DEBUG oslo.service.loopingcall [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1564.583863] env[62816]: DEBUG nova.compute.manager [-] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1564.583863] env[62816]: DEBUG nova.network.neutron [-] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1564.604945] env[62816]: DEBUG nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1564.925527] env[62816]: DEBUG nova.compute.manager [None req-564b156d-3c38-4b88-9ff2-726efd969bb5 tempest-ServersListShow296Test-56642911 tempest-ServersListShow296Test-56642911-project-member] [instance: a73e3b40-34e4-427f-b34b-ac2d3d233565] Instance disappeared before build. {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2440}} [ 1564.978966] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788452, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.070349] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788455, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.168019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a060aa1-5b27-46be-9867-7abde231feff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.168019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aae909e-6fee-4304-847b-bf53b072048c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.197812] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e543296-4e92-4e97-b97d-cf2b49b9ae30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.207324] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be015df4-68de-453f-b9f7-760dcd4470ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.222236] env[62816]: DEBUG nova.compute.provider_tree [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1565.408271] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1565.410231] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1565.410408] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1565.410603] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1565.410747] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1565.410894] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1565.411134] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1565.411301] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1565.411468] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1565.411634] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1565.411807] env[62816]: DEBUG nova.virt.hardware [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1565.412707] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e126e1d-2fa3-4105-8e98-ecd61ebb6d07 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.421749] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610ed119-3ec0-4da1-ba22-db6492f1dffd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.437941] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance VIF info [] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1565.443322] env[62816]: DEBUG oslo.service.loopingcall [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to 
return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1565.444752] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1565.444752] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-23b70877-3d20-443b-917f-fe237b6e701e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.456752] env[62816]: DEBUG oslo_concurrency.lockutils [None req-564b156d-3c38-4b88-9ff2-726efd969bb5 tempest-ServersListShow296Test-56642911 tempest-ServersListShow296Test-56642911-project-member] Lock "a73e3b40-34e4-427f-b34b-ac2d3d233565" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.541s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1565.462641] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1565.462641] env[62816]: value = "task-1788456" [ 1565.462641] env[62816]: _type = "Task" [ 1565.462641] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.476636] env[62816]: DEBUG oslo_vmware.api [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788452, 'name': PowerOnVM_Task, 'duration_secs': 1.081613} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.480115] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1565.480342] env[62816]: INFO nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Took 10.30 seconds to spawn the instance on the hypervisor. [ 1565.480524] env[62816]: DEBUG nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1565.480757] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788456, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.481855] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea889054-7e1f-492a-8ce8-0432de0bf6af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.570754] env[62816]: DEBUG nova.compute.manager [req-c977af36-0ee2-48e2-82d4-66565ee580d5 req-abe53869-3410-40e6-935f-58334844d93f service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received event network-vif-deleted-6a0018bd-3abf-40af-978e-8bdd8a1e59ad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1565.571026] env[62816]: INFO nova.compute.manager [req-c977af36-0ee2-48e2-82d4-66565ee580d5 req-abe53869-3410-40e6-935f-58334844d93f service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Neutron deleted interface 6a0018bd-3abf-40af-978e-8bdd8a1e59ad; detaching it from the instance and deleting it from the info cache [ 1565.571287] env[62816]: DEBUG nova.network.neutron [req-c977af36-0ee2-48e2-82d4-66565ee580d5 req-abe53869-3410-40e6-935f-58334844d93f service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Updating instance_info_cache with network_info: [{"id": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "address": "fa:16:3e:b6:0b:7e", "network": {"id": "abd7e484-d1ef-4023-9bb0-b1edeccfdd36", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1182060646", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "8e20c8f5bdd64f1d89157aa0b947431e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6d4ef133-b6f3-41d1-add4-92a1482195cf", "external-id": "nsx-vlan-transportzone-446", "segmentation_id": 446, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6203dfb1-23", "ovs_interfaceid": "6203dfb1-2392-4ca7-bcbd-c68af134c40c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.579442] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788455, 'name': ReconfigVM_Task, 'duration_secs': 0.610261} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.581641] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 31ac8296-14fa-46f7-b825-c31904b832d5/31ac8296-14fa-46f7-b825-c31904b832d5.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1565.583051] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-207befa5-f0dc-4941-a704-0363ace3f4e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.591461] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1565.591461] env[62816]: value = "task-1788457" [ 1565.591461] env[62816]: _type = "Task" [ 1565.591461] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.602587] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788457, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.725906] env[62816]: DEBUG nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1565.977552] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788456, 'name': CreateVM_Task, 'duration_secs': 0.264788} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.977744] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1565.978204] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1565.978381] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1565.978750] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1565.979088] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c703e444-cc5c-4d84-a76e-9bcd864084fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.983948] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1565.983948] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52eee282-b744-dc2c-1b5f-fb14f4d6d08e" [ 1565.983948] env[62816]: _type = "Task" [ 1565.983948] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.993022] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52eee282-b744-dc2c-1b5f-fb14f4d6d08e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.999621] env[62816]: INFO nova.compute.manager [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Took 34.53 seconds to build instance. 
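The entries above follow the usual pattern for long-running vCenter operations: a task such as CreateVM_Task, SearchDatastore_Task or PowerOnVM_Task is invoked, its progress is polled repeatedly, and completion is logged together with duration_secs. A minimal, self-contained sketch of such a poll loop is shown below for orientation; get_task_info and TaskState are hypothetical stand-ins, not the actual oslo_vmware or vSphere API.

import time


class TaskState:
    # hypothetical states for the sketch; not oslo_vmware's own constants
    RUNNING = "running"
    SUCCESS = "success"
    ERROR = "error"


def wait_for_task(get_task_info, task_id, poll_interval=0.5):
    """Poll a task until it leaves RUNNING, printing progress, and return its result."""
    started = time.monotonic()
    while True:
        # get_task_info is a caller-supplied callable returning e.g.
        # {"state": "running", "progress": 78} or {"state": "success", "result": ...}
        info = get_task_info(task_id)
        if info["state"] == TaskState.RUNNING:
            print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)
            continue
        duration = time.monotonic() - started
        if info["state"] == TaskState.SUCCESS:
            print(f"Task: {task_id} completed successfully in {duration:.6f}s.")
            return info.get("result")
        raise RuntimeError(f"Task {task_id} failed after {duration:.6f}s: {info.get('error')}")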
[ 1566.074506] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cbf03b2-0b5c-4002-999f-057505ddfd87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.082704] env[62816]: DEBUG nova.network.neutron [-] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1566.087208] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70761cf-1c37-4ed6-8ee3-9bd7d5c4198d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.104169] env[62816]: INFO nova.compute.manager [-] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Took 1.52 seconds to deallocate network for instance. [ 1566.131141] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788457, 'name': Rename_Task, 'duration_secs': 0.211849} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.147202] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1566.147202] env[62816]: DEBUG nova.compute.manager [req-c977af36-0ee2-48e2-82d4-66565ee580d5 req-abe53869-3410-40e6-935f-58334844d93f service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Detach interface failed, port_id=6a0018bd-3abf-40af-978e-8bdd8a1e59ad, reason: Instance 83f7b5b8-228b-4d17-ab52-8df65fe247e3 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1566.147202] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6da15d67-c9af-4ed2-b68c-5e11dd26f851 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.155023] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1566.155023] env[62816]: value = "task-1788458" [ 1566.155023] env[62816]: _type = "Task" [ 1566.155023] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.163397] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788458, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.230954] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.363s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.234308] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 19.701s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.289244] env[62816]: INFO nova.scheduler.client.report [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Deleted allocations for instance 42093232-a4e5-4cc3-ab1c-a0023a91e102 [ 1566.496184] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52eee282-b744-dc2c-1b5f-fb14f4d6d08e, 'name': SearchDatastore_Task, 'duration_secs': 0.00946} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.496491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.496717] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1566.496944] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.497094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.497268] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 
tempest-ServerShowV254Test-1675790595-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1566.497522] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3968c749-2659-4804-98b4-f799094a61ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.501811] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e53176ec-047b-409d-9459-d196fe3925b4 tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.190s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.508041] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1566.508041] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1566.508041] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a79d4af9-094c-479e-b49c-badc629bf9ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.513593] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1566.513593] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b00cec-d0d5-e3eb-de0b-60de0d1ce6b9" [ 1566.513593] env[62816]: _type = "Task" [ 1566.513593] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.522476] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b00cec-d0d5-e3eb-de0b-60de0d1ce6b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.618636] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.669329] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788458, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.740530] env[62816]: INFO nova.compute.claims [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1566.798846] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f2e46bde-1ced-4755-bfda-ec2cb043b3b1 tempest-ListServerFiltersTestJSON-142177963 tempest-ListServerFiltersTestJSON-142177963-project-member] Lock "42093232-a4e5-4cc3-ab1c-a0023a91e102" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.261s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.031779] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b00cec-d0d5-e3eb-de0b-60de0d1ce6b9, 'name': SearchDatastore_Task, 'duration_secs': 0.008451} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.035444] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-852f87b0-2a35-4e17-89af-9ae316eb63b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.040448] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1567.040448] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526adba6-1a35-4b88-03d3-e69cbd75a96d" [ 1567.040448] env[62816]: _type = "Task" [ 1567.040448] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.052547] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526adba6-1a35-4b88-03d3-e69cbd75a96d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.167275] env[62816]: DEBUG oslo_vmware.api [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788458, 'name': PowerOnVM_Task, 'duration_secs': 0.92141} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.167275] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1567.167275] env[62816]: INFO nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Took 9.25 seconds to spawn the instance on the hypervisor. [ 1567.167275] env[62816]: DEBUG nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1567.167275] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3cc6d84-2786-4640-9bb9-8ac4416a4d9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.217539] env[62816]: INFO nova.compute.manager [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Rescuing [ 1567.219503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.219503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.219503] env[62816]: DEBUG nova.network.neutron [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1567.248165] env[62816]: INFO nova.compute.resource_tracker [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating resource usage from migration 7b53fd5b-adc7-497d-8fbe-fa8da06269e9 [ 1567.553620] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526adba6-1a35-4b88-03d3-e69cbd75a96d, 'name': SearchDatastore_Task, 'duration_secs': 0.010024} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.556610] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.556960] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1567.557894] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab0db05e-9ea3-43d7-922a-11e664b15a40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.565678] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1567.565678] env[62816]: value = "task-1788459" [ 1567.565678] env[62816]: _type = "Task" [ 1567.565678] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.576722] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788459, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.689350] env[62816]: INFO nova.compute.manager [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Took 34.46 seconds to build instance. 
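The spawn of instance 3c4cca03-b2ee-48a2-9a15-a21124bd6599 traced above acquires a lock on the cached base image under devstack-image-cache_base, verifies it with SearchDatastore_Task, copies the VMDK to the instance directory with CopyVirtualDisk_Task, and only then releases the lock. A minimal sketch of that lock-then-copy pattern follows; search_datastore, fetch_image and copy_virtual_disk are hypothetical helpers supplied by the caller, not the real nova.virt.vmwareapi functions.

import threading
from contextlib import contextmanager

_locks = {}                       # one lock per cached image id (sketch-local state)
_locks_guard = threading.Lock()


@contextmanager
def image_cache_lock(image_id):
    """Serialize work on a single cached base image."""
    with _locks_guard:
        lock = _locks.setdefault(image_id, threading.Lock())
    with lock:
        yield


def prepare_root_disk(image_id, cache_path, instance_path,
                      search_datastore, fetch_image, copy_virtual_disk):
    """Copy the cached base VMDK to the instance directory, fetching it first on a cache miss."""
    with image_cache_lock(image_id):
        if not search_datastore(cache_path):
            # Cache miss: download the image once, still under the lock,
            # so concurrent spawns of the same image do not race.
            fetch_image(image_id, cache_path)
        copy_virtual_disk(cache_path, instance_path)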
[ 1567.813868] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a859f7ab-37e5-4380-ad3b-04856069d8d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.827981] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b426712b-ec1c-4210-a5a5-cdaf2bff24bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.865319] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67967e4f-69d4-4b83-9b0d-72df6aa112d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.878666] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e415173-715f-44c8-aad4-59eff6462fa5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.900026] env[62816]: DEBUG nova.compute.provider_tree [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1568.017826] env[62816]: DEBUG nova.compute.manager [req-2731d9f5-225b-4228-88a9-d872e5c51598 req-6b1fd26b-2c6f-45d9-b45e-cb70077726dd service nova] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Received event network-vif-deleted-6203dfb1-2392-4ca7-bcbd-c68af134c40c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1568.076024] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788459, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472088} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.076545] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1568.076857] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1568.077186] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c176a00e-2d03-45bd-a68b-68d9da9023b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.091666] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1568.091666] env[62816]: value = "task-1788460" [ 1568.091666] env[62816]: _type = "Task" [ 1568.091666] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.104706] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788460, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.191210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b06b4d0f-bac6-40a0-ace7-c4bd297cd9f3 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "31ac8296-14fa-46f7-b825-c31904b832d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.206s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.338075] env[62816]: DEBUG nova.network.neutron [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updating instance_info_cache with network_info: [{"id": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "address": "fa:16:3e:7f:ac:ca", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f921b79-e0", "ovs_interfaceid": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.402693] env[62816]: DEBUG nova.scheduler.client.report [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1568.602349] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788460, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092913} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.602792] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1568.603622] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3952784-7144-46a6-96b2-424f424ab1cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.631118] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1568.631118] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09729fa5-079f-4d09-a1e8-2482ebdb8463 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.653493] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1568.653493] env[62816]: value = "task-1788461" [ 1568.653493] env[62816]: _type = "Task" [ 1568.653493] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.663752] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788461, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.841460] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.908088] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.674s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.908369] env[62816]: INFO nova.compute.manager [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Migrating [ 1568.916353] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.033s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.919024] env[62816]: INFO nova.compute.claims [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1568.986949] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "049e1f97-ab58-4797-a084-f16a7a58e2cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.987147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.987334] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "049e1f97-ab58-4797-a084-f16a7a58e2cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.987519] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.987684] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.989767] env[62816]: INFO nova.compute.manager [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Terminating instance [ 1568.991510] env[62816]: DEBUG nova.compute.manager [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1568.991849] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1568.992679] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b9da04-967a-4662-9e40-c4f2c7c6e475 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.000617] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1569.000869] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d92303b4-e754-4410-b7a1-1aa640e4f58e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.087597] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1569.087803] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1569.087986] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b 
tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Deleting the datastore file [datastore1] 049e1f97-ab58-4797-a084-f16a7a58e2cc {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1569.088621] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3182301d-9634-4887-9fbb-9ea1ba928b8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.096044] env[62816]: DEBUG oslo_vmware.api [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1569.096044] env[62816]: value = "task-1788463" [ 1569.096044] env[62816]: _type = "Task" [ 1569.096044] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.108032] env[62816]: DEBUG oslo_vmware.api [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788463, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.166259] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788461, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.384216] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1569.384504] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8b965d1-583e-45df-a4b0-a8aa45898304 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.393912] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1569.393912] env[62816]: value = "task-1788464" [ 1569.393912] env[62816]: _type = "Task" [ 1569.393912] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.410880] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788464, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.439915] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.440171] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.440397] env[62816]: DEBUG nova.network.neutron [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.607090] env[62816]: DEBUG oslo_vmware.api [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788463, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137436} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.607425] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1569.607425] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1569.607654] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1569.607897] env[62816]: INFO nova.compute.manager [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1569.608305] env[62816]: DEBUG oslo.service.loopingcall [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.608451] env[62816]: DEBUG nova.compute.manager [-] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1569.608577] env[62816]: DEBUG nova.network.neutron [-] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1569.666660] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788461, 'name': ReconfigVM_Task, 'duration_secs': 0.694198} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.666895] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599/3c4cca03-b2ee-48a2-9a15-a21124bd6599.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1569.667545] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3854cbd9-6804-4e0c-a411-8d3fd1003c8d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.674394] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1569.674394] env[62816]: value = "task-1788465" [ 1569.674394] env[62816]: _type = "Task" [ 1569.674394] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.683597] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788465, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.907984] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788464, 'name': PowerOffVM_Task, 'duration_secs': 0.467077} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.908707] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1569.909805] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec6e64e-b66a-4595-aba5-b9cbbec8c825 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.939195] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e9141ed-fe6e-48a6-8eb5-ad0ab6a7eb3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.991696] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1569.991696] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4143cb5c-6ba9-4474-aa14-993cfa078710 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.999477] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1569.999477] env[62816]: value = "task-1788466" [ 1569.999477] env[62816]: _type = "Task" [ 1569.999477] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.016363] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1570.016653] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1570.016957] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.017177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.017403] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.018706] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e90debba-e21e-45f9-a4ea-c97042cc0578 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.027479] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.028425] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1570.028425] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70b9d15f-cbea-4e0b-86f7-3d74387b2dc3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.040448] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1570.040448] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5291be6b-8d79-1a1f-7a0e-0fdbb8db12ab" [ 1570.040448] env[62816]: _type = "Task" [ 1570.040448] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.049464] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5291be6b-8d79-1a1f-7a0e-0fdbb8db12ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.186463] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788465, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.431334] env[62816]: DEBUG nova.network.neutron [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.497165] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4563b2f-1f42-4b56-af4b-8a12e1f5a757 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.506443] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93522cd7-1020-4f87-9c45-f0307387c77e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.549904] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ed3be2-aa9d-4875-a136-bfbebda569c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.554617] env[62816]: DEBUG nova.compute.manager [req-7663f790-ddc4-4015-8f21-97ff37432a52 req-86bf1ee1-f50c-4cf4-b52d-5f4158d78ed0 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Received event network-vif-deleted-fe2be693-eb9b-4f94-b238-992c67bfedda {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1570.555095] env[62816]: INFO nova.compute.manager [req-7663f790-ddc4-4015-8f21-97ff37432a52 req-86bf1ee1-f50c-4cf4-b52d-5f4158d78ed0 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Neutron deleted interface fe2be693-eb9b-4f94-b238-992c67bfedda; detaching it from the instance and deleting it from the info cache [ 1570.555546] env[62816]: DEBUG nova.network.neutron [req-7663f790-ddc4-4015-8f21-97ff37432a52 req-86bf1ee1-f50c-4cf4-b52d-5f4158d78ed0 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.565287] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5291be6b-8d79-1a1f-7a0e-0fdbb8db12ab, 'name': SearchDatastore_Task, 'duration_secs': 0.008268} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.569132] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95c5ad21-3654-4306-9a45-d831a38e77ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.575083] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2136e95-e05b-40f4-900e-39fad980b474 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.595009] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1570.595009] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527659ff-3ef1-378b-aaa5-bfe890dbd2db" [ 1570.595009] env[62816]: _type = "Task" [ 1570.595009] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.596061] env[62816]: DEBUG nova.compute.provider_tree [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.608393] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527659ff-3ef1-378b-aaa5-bfe890dbd2db, 'name': SearchDatastore_Task, 'duration_secs': 0.010017} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.608740] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.609090] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. {{(pid=62816) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1570.609763] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-430cd9fb-390d-40c2-bc30-8f98b5f1b691 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.616811] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1570.616811] env[62816]: value = "task-1788467" [ 1570.616811] env[62816]: _type = "Task" [ 1570.616811] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.627394] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788467, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.685826] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788465, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.759488] env[62816]: DEBUG nova.network.neutron [-] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.938692] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.058421] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-18381e72-7392-4f84-9ca4-37e83329eade {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.074654] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f712b22-2dfb-4629-ab6f-5d199563052e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.120142] env[62816]: DEBUG nova.scheduler.client.report [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1571.128448] env[62816]: DEBUG nova.compute.manager [req-7663f790-ddc4-4015-8f21-97ff37432a52 req-86bf1ee1-f50c-4cf4-b52d-5f4158d78ed0 service nova] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Detach interface failed, port_id=fe2be693-eb9b-4f94-b238-992c67bfedda, reason: Instance 049e1f97-ab58-4797-a084-f16a7a58e2cc could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1571.154447] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788467, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45982} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.155146] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. 
[ 1571.156187] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e5a5d5-5013-4eb2-a53c-5a380340e8ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.205984] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1571.210304] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-85241350-e3a2-4eca-a344-c83482c74f5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.230898] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788465, 'name': Rename_Task, 'duration_secs': 1.151577} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.232402] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1571.233089] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1571.233089] env[62816]: value = "task-1788468" [ 1571.233089] env[62816]: _type = "Task" [ 1571.233089] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.233089] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74cbe8bf-6232-4688-8295-8602f886bd2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.244074] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788468, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.245286] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1571.245286] env[62816]: value = "task-1788469" [ 1571.245286] env[62816]: _type = "Task" [ 1571.245286] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.257964] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "b788e586-850b-46e7-a204-d80eac56cce7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.258355] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "b788e586-850b-46e7-a204-d80eac56cce7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.265191] env[62816]: INFO nova.compute.manager [-] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Took 1.65 seconds to deallocate network for instance. [ 1571.635515] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.718s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.635765] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1571.645096] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.469s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.645539] env[62816]: DEBUG nova.objects.instance [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lazy-loading 'resources' on Instance uuid ba6e94c9-eb58-4040-8e28-f255961e76ca {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1571.748169] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788468, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.757942] env[62816]: DEBUG oslo_vmware.api [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788469, 'name': PowerOnVM_Task, 'duration_secs': 0.476569} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.757942] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1571.757942] env[62816]: DEBUG nova.compute.manager [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1571.757942] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d12679f7-61bb-482d-a634-7d1d6aca0165 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.761653] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1571.770674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.149247] env[62816]: DEBUG nova.compute.utils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1572.158601] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1572.158601] env[62816]: DEBUG nova.network.neutron [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1572.252785] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788468, 'name': ReconfigVM_Task, 'duration_secs': 0.620563} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.253135] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Reconfigured VM instance instance-00000033 to attach disk [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1572.254381] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15558b02-4dcc-47eb-a5e8-06f4f839dbc3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.262507] env[62816]: DEBUG nova.policy [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a3267ab64e4640bf00a0e5dbaaf044', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d830983a3c14168b8f0b67478f27589', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1572.299898] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0b44d34-e26c-41a0-9862-4dbe2f7a3828 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.310452] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.317329] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1572.317329] env[62816]: value = "task-1788470" [ 1572.317329] env[62816]: _type = "Task" [ 1572.317329] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.328620] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788470, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.331970] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.460018] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b86309-8d1f-4189-b5be-63d4c5165dbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.481838] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1572.659200] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1572.694459] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a31bc2-e867-4fbb-a3ac-f87a83730609 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.704930] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090549f4-09db-430e-9015-6daef66d93f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.749375] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-245b75e7-4821-4163-8707-f30d3d6b0771 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.755455] env[62816]: DEBUG nova.compute.manager [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Received event network-changed-cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1572.755652] env[62816]: DEBUG nova.compute.manager [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Refreshing instance network info cache due to event network-changed-cb0d8306-4954-4597-b857-f3410e8e30d8. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1572.755863] env[62816]: DEBUG oslo_concurrency.lockutils [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] Acquiring lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.756039] env[62816]: DEBUG oslo_concurrency.lockutils [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] Acquired lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.756218] env[62816]: DEBUG nova.network.neutron [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Refreshing network info cache for port cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1572.762778] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d83372-407d-4d52-8d79-18567b5d399e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.778752] env[62816]: DEBUG nova.compute.provider_tree [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1572.828229] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788470, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.868826] env[62816]: DEBUG nova.network.neutron [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Successfully created port: 376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1572.988763] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1572.988763] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-382ac422-04b8-425f-8a0b-d5038010e6eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.000078] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1573.000078] env[62816]: value = "task-1788471" [ 1573.000078] env[62816]: _type = "Task" [ 1573.000078] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.010947] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.191257] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.191257] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.191602] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.191602] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.192254] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.198348] env[62816]: INFO nova.compute.manager [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Terminating instance [ 1573.199326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "refresh_cache-3c4cca03-b2ee-48a2-9a15-a21124bd6599" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1573.199480] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 
tempest-ServerShowV254Test-1675790595-project-member] Acquired lock "refresh_cache-3c4cca03-b2ee-48a2-9a15-a21124bd6599" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.199683] env[62816]: DEBUG nova.network.neutron [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1573.285556] env[62816]: DEBUG nova.scheduler.client.report [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1573.327399] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788470, 'name': ReconfigVM_Task, 'duration_secs': 0.600588} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.327651] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1573.327862] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc3e5b12-b8c3-4aee-b7ff-302246f5aa34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.340997] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1573.340997] env[62816]: value = "task-1788472" [ 1573.340997] env[62816]: _type = "Task" [ 1573.340997] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.355285] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788472, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.511076] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788471, 'name': PowerOffVM_Task, 'duration_secs': 0.174739} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.511365] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1573.511547] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1573.675103] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1573.703386] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1573.703670] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1573.703841] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1573.704036] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1573.704188] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1573.704332] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 
tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1573.704539] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1573.704727] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1573.704905] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1573.705210] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1573.705408] env[62816]: DEBUG nova.virt.hardware [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1573.707957] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c41c21-b205-42f8-bd81-0ceacc599366 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.716328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4b04a0-82c1-48af-86aa-cc1280a6a880 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.729869] env[62816]: DEBUG nova.network.neutron [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1573.790226] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.792425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.109s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.792665] env[62816]: DEBUG nova.objects.instance [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'resources' on Instance uuid 6767c231-2dcb-4d19-ae7c-5b026d48ed26 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1573.797334] env[62816]: DEBUG nova.network.neutron [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.822187] env[62816]: INFO nova.scheduler.client.report [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleted allocations for instance ba6e94c9-eb58-4040-8e28-f255961e76ca [ 1573.853531] env[62816]: DEBUG oslo_vmware.api [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788472, 'name': PowerOnVM_Task, 'duration_secs': 0.367912} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.853531] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1573.855690] env[62816]: DEBUG nova.compute.manager [None req-bd448e46-2bbe-40c2-ab6b-c96d93e3346f tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1573.856758] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a367eeea-8d1b-449b-bed4-bb5e07f92f90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.876132] env[62816]: DEBUG nova.network.neutron [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updated VIF entry in instance network info cache for port cb0d8306-4954-4597-b857-f3410e8e30d8. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1573.876479] env[62816]: DEBUG nova.network.neutron [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updating instance_info_cache with network_info: [{"id": "cb0d8306-4954-4597-b857-f3410e8e30d8", "address": "fa:16:3e:85:21:f5", "network": {"id": "b41cf939-d61d-47cd-a21d-4effc220b206", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-127719284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6a4f555b56f4b1fae741a76b4140677", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "df1bf911-aac9-4d2d-ae69-66ace3e6a2d1", "external-id": "nsx-vlan-transportzone-395", "segmentation_id": 395, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcb0d8306-49", "ovs_interfaceid": "cb0d8306-4954-4597-b857-f3410e8e30d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.017962] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.018254] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.018511] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.018813] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.019273] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.019408] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.019753] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.020061] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.020363] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.020670] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.020960] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.031806] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acc188ac-26b5-4c65-8113-c65636663264 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.052227] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1574.052227] env[62816]: value = "task-1788473" [ 1574.052227] env[62816]: _type = "Task" [ 1574.052227] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.060934] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788473, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.301821] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Releasing lock "refresh_cache-3c4cca03-b2ee-48a2-9a15-a21124bd6599" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.304942] env[62816]: DEBUG nova.compute.manager [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1574.304942] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1574.304942] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de21fba-5f81-42ee-a23a-2aa01b17efdc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.314844] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1574.315504] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a4b8880-fa47-4db2-a887-7118da525d5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.324912] env[62816]: DEBUG oslo_vmware.api [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1574.324912] env[62816]: value = "task-1788474" [ 1574.324912] env[62816]: _type = "Task" [ 1574.324912] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.332711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a95187dd-7f44-483c-8397-d8e50a9afa09 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "ba6e94c9-eb58-4040-8e28-f255961e76ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.016s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.343387] env[62816]: DEBUG oslo_vmware.api [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.381035] env[62816]: DEBUG oslo_concurrency.lockutils [req-fc6677a7-31c8-4c05-8262-a2864227d7fe req-cc2efb35-ef4c-4b68-ad60-eff90115ef80 service nova] Releasing lock "refresh_cache-31ac8296-14fa-46f7-b825-c31904b832d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.569502] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788473, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.664834] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "ede88298-0eae-4471-b602-c26b5fa7a72a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.668085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.844988] env[62816]: DEBUG oslo_vmware.api [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788474, 'name': PowerOffVM_Task, 'duration_secs': 0.211151} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.844988] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1574.844988] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1574.844988] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-652dd819-a11c-4ad3-8b30-e5bce91253d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.871881] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1574.872757] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1574.873807] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Deleting the datastore file [datastore1] 3c4cca03-b2ee-48a2-9a15-a21124bd6599 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1574.873807] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17856611-bb35-4c27-816a-b2798a1919e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.880279] env[62816]: DEBUG oslo_vmware.api [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for the task: (returnval){ [ 1574.880279] env[62816]: value = "task-1788476" [ 1574.880279] env[62816]: _type = "Task" [ 1574.880279] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.885374] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0320627b-aa47-41a9-80b5-3fd154a3e535 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.896108] env[62816]: DEBUG oslo_vmware.api [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788476, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.900085] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dffe27-f907-407e-baa0-c30dc0ea1d9e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.940686] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0ffb21-df0a-427d-86b9-1e006d84439d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.953731] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4717ab-86cd-4022-847c-2fbad9a02d61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.968308] env[62816]: DEBUG nova.compute.provider_tree [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.989183] env[62816]: INFO nova.compute.manager [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Unrescuing [ 1574.989398] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.989553] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquired lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.989765] env[62816]: DEBUG nova.network.neutron [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.062352] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788473, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.173113] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1575.341060] env[62816]: DEBUG nova.network.neutron [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Successfully updated port: 376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.382847] env[62816]: DEBUG nova.compute.manager [req-2f8e23c3-0027-4b9d-b1aa-d8113eeec96e req-7a5ca2a5-04a1-4296-8305-37921d88d2b7 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Received event network-vif-plugged-376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1575.383075] env[62816]: DEBUG oslo_concurrency.lockutils [req-2f8e23c3-0027-4b9d-b1aa-d8113eeec96e req-7a5ca2a5-04a1-4296-8305-37921d88d2b7 service nova] Acquiring lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.384034] env[62816]: DEBUG oslo_concurrency.lockutils [req-2f8e23c3-0027-4b9d-b1aa-d8113eeec96e req-7a5ca2a5-04a1-4296-8305-37921d88d2b7 service nova] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.384034] env[62816]: DEBUG oslo_concurrency.lockutils [req-2f8e23c3-0027-4b9d-b1aa-d8113eeec96e req-7a5ca2a5-04a1-4296-8305-37921d88d2b7 service nova] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.384034] env[62816]: DEBUG nova.compute.manager [req-2f8e23c3-0027-4b9d-b1aa-d8113eeec96e req-7a5ca2a5-04a1-4296-8305-37921d88d2b7 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] No waiting events found dispatching network-vif-plugged-376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1575.384034] env[62816]: WARNING 
nova.compute.manager [req-2f8e23c3-0027-4b9d-b1aa-d8113eeec96e req-7a5ca2a5-04a1-4296-8305-37921d88d2b7 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Received unexpected event network-vif-plugged-376123cb-17d7-4137-a4aa-f396ee425d69 for instance with vm_state building and task_state spawning. [ 1575.396021] env[62816]: DEBUG oslo_vmware.api [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Task: {'id': task-1788476, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176268} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.396021] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1575.396021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1575.396021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1575.396021] env[62816]: INFO nova.compute.manager [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1575.396021] env[62816]: DEBUG oslo.service.loopingcall [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1575.396021] env[62816]: DEBUG nova.compute.manager [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1575.396021] env[62816]: DEBUG nova.network.neutron [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1575.413043] env[62816]: DEBUG nova.network.neutron [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1575.475021] env[62816]: DEBUG nova.scheduler.client.report [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1575.563220] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788473, 'name': ReconfigVM_Task, 'duration_secs': 1.178208} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.563573] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1575.692631] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.766517] env[62816]: DEBUG nova.network.neutron [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updating instance_info_cache with network_info: [{"id": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "address": "fa:16:3e:7f:ac:ca", "network": {"id": "a3705035-2707-4a90-bba3-0475e61e59af", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1934658223-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "830fc28618ac4a31856cca469d46a750", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "001929c7-0dc4-4b73-a9f1-d672f8377985", "external-id": "nsx-vlan-transportzone-230", "segmentation_id": 230, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f921b79-e0", "ovs_interfaceid": "5f921b79-e02e-4aa2-b074-89b96a7890ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.846703] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "refresh_cache-1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.846989] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "refresh_cache-1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.847236] env[62816]: DEBUG nova.network.neutron [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.892605] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.893154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.916289] env[62816]: DEBUG nova.network.neutron [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.980208] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.187s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.981823] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.022s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.982069] env[62816]: DEBUG nova.objects.instance [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lazy-loading 'resources' on Instance uuid 946dad01-c012-457d-8bfe-6395ff0aaedf {{(pid=62816) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1576.003334] env[62816]: INFO nova.scheduler.client.report [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted allocations for instance 6767c231-2dcb-4d19-ae7c-5b026d48ed26 [ 1576.070470] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0af33be3-1673-42f1-a298-c50b616c7610',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2008575729',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1576.070725] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1576.070885] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1576.071082] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1576.071231] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1576.071378] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1576.071581] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1576.071740] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1576.071908] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1576.072087] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1576.072256] env[62816]: DEBUG nova.virt.hardware [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1576.077784] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1576.078379] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68d3257a-9783-485d-85a1-03f143e10cbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.098045] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1576.098045] env[62816]: value = "task-1788477" [ 1576.098045] env[62816]: _type = "Task" [ 1576.098045] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.108037] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788477, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.269419] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Releasing lock "refresh_cache-ee543138-1c43-46c4-a512-1977fa5eb3c6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.270184] env[62816]: DEBUG nova.objects.instance [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lazy-loading 'flavor' on Instance uuid ee543138-1c43-46c4-a512-1977fa5eb3c6 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1576.391135] env[62816]: DEBUG nova.network.neutron [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1576.395885] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1576.419448] env[62816]: INFO nova.compute.manager [-] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Took 1.02 seconds to deallocate network for instance. [ 1576.511731] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9378e9c8-a53e-4808-a6cf-840524eff66f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "6767c231-2dcb-4d19-ae7c-5b026d48ed26" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.860s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.614011] env[62816]: DEBUG nova.network.neutron [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Updating instance_info_cache with network_info: [{"id": "376123cb-17d7-4137-a4aa-f396ee425d69", "address": "fa:16:3e:8e:10:d7", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376123cb-17", "ovs_interfaceid": "376123cb-17d7-4137-a4aa-f396ee425d69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.620196] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788477, 'name': ReconfigVM_Task, 'duration_secs': 0.220057} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.621156] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1576.621348] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d308c99-5116-4ea6-933f-09886df242db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.648742] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1576.652239] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08cda8c0-2317-4534-834e-934ae5a227c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.673728] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1576.673728] env[62816]: value = "task-1788478" [ 1576.673728] env[62816]: _type = "Task" [ 1576.673728] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.685453] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788478, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.775404] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7527ceb7-b94d-48b6-8ed7-c7ea1d3bf819 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.798704] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1576.799054] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d3d36e8-bf02-4a51-830d-72bc2c8c0559 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.806089] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1576.806089] env[62816]: value = "task-1788479" [ 1576.806089] env[62816]: _type = "Task" [ 1576.806089] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.814170] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788479, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.915430] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.929614] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.058156] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af32f5c-4543-4bef-a38e-b4a5e6c271bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.065685] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5118cef1-f056-4516-a21b-bdb6fcef0dab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.101019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b50fc9e-1897-4b6e-970f-25079e25177d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.108268] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2759ec26-c4f7-448f-890f-c18c5c8388b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.124453] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "refresh_cache-1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.124567] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Instance network_info: |[{"id": "376123cb-17d7-4137-a4aa-f396ee425d69", "address": "fa:16:3e:8e:10:d7", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376123cb-17", "ovs_interfaceid": "376123cb-17d7-4137-a4aa-f396ee425d69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1577.125180] env[62816]: DEBUG nova.compute.provider_tree [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1577.129626] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:10:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '376123cb-17d7-4137-a4aa-f396ee425d69', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1577.135548] env[62816]: DEBUG oslo.service.loopingcall [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1577.136205] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1577.136790] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-492df14f-0d4e-4981-8b61-5620b5670298 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.159029] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1577.159029] env[62816]: value = "task-1788480" [ 1577.159029] env[62816]: _type = "Task" [ 1577.159029] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.171073] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788480, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.183045] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788478, 'name': ReconfigVM_Task, 'duration_secs': 0.265933} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.183370] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.183650] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1577.316591] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788479, 'name': PowerOffVM_Task, 'duration_secs': 0.210198} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.316874] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1577.322034] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Reconfiguring VM instance instance-00000033 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1577.322319] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32685dc4-6bd2-41a2-b365-50713648e16f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.341778] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1577.341778] env[62816]: value = "task-1788481" [ 1577.341778] env[62816]: _type = "Task" [ 1577.341778] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.349840] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788481, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.411248] env[62816]: DEBUG nova.compute.manager [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Received event network-changed-376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1577.411444] env[62816]: DEBUG nova.compute.manager [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Refreshing instance network info cache due to event network-changed-376123cb-17d7-4137-a4aa-f396ee425d69. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1577.411654] env[62816]: DEBUG oslo_concurrency.lockutils [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] Acquiring lock "refresh_cache-1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.411795] env[62816]: DEBUG oslo_concurrency.lockutils [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] Acquired lock "refresh_cache-1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.411957] env[62816]: DEBUG nova.network.neutron [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Refreshing network info cache for port 376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1577.638086] env[62816]: DEBUG nova.scheduler.client.report [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1577.672412] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788480, 'name': CreateVM_Task, 'duration_secs': 0.399422} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.672672] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1577.673439] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.673788] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.674077] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.674615] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1aacb69d-1b8a-474c-8a80-b5e1d551c023 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.679807] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1577.679807] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5201e126-5720-df17-1962-782a49fab9d8" [ 1577.679807] env[62816]: _type = "Task" [ 1577.679807] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.689476] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5201e126-5720-df17-1962-782a49fab9d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.690302] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7c9765-b487-42b2-aaae-acd7e436d962 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.710370] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6683b05d-8198-4e1d-8e04-274461199642 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.732972] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1577.852912] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788481, 'name': ReconfigVM_Task, 'duration_secs': 0.237713} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.853349] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Reconfigured VM instance instance-00000033 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1577.853654] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1577.853986] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b153cedb-47e9-4e75-b13b-c9bb3904b684 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.860581] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1577.860581] env[62816]: value = "task-1788482" [ 1577.860581] env[62816]: _type = "Task" [ 1577.860581] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.868753] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788482, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.145700] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.153198] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.372s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.155034] env[62816]: INFO nova.compute.claims [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.193054] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5201e126-5720-df17-1962-782a49fab9d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009142} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.193395] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.193606] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1578.193839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.194305] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.194305] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1578.194480] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0ccc3606-90e3-4f06-937c-c3796c9f1ca5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.204021] env[62816]: INFO nova.scheduler.client.report [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted allocations for instance 946dad01-c012-457d-8bfe-6395ff0aaedf [ 1578.211447] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1578.211673] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1578.212470] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7976b82a-a4da-461f-b30b-a065f281b957 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.218749] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1578.218749] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cc6923-5738-e8f1-aae6-db8fe7d064d5" [ 1578.218749] env[62816]: _type = "Task" [ 1578.218749] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.226695] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cc6923-5738-e8f1-aae6-db8fe7d064d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.310154] env[62816]: DEBUG nova.network.neutron [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Updated VIF entry in instance network info cache for port 376123cb-17d7-4137-a4aa-f396ee425d69. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.311962] env[62816]: DEBUG nova.network.neutron [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Updating instance_info_cache with network_info: [{"id": "376123cb-17d7-4137-a4aa-f396ee425d69", "address": "fa:16:3e:8e:10:d7", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap376123cb-17", "ovs_interfaceid": "376123cb-17d7-4137-a4aa-f396ee425d69", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.339341] env[62816]: DEBUG nova.network.neutron [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Port 5b2b9d44-f66e-428f-a75c-6e213ebdb364 binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1578.377354] env[62816]: DEBUG oslo_vmware.api [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788482, 'name': PowerOnVM_Task, 'duration_secs': 0.362428} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.377976] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1578.378264] env[62816]: DEBUG nova.compute.manager [None req-eccafa0c-7045-422e-8ec3-90bf42b74ced tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1578.379462] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f49e37-b08e-49e2-8d9c-5c1c17989ab6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.713951] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a8ab459b-7959-408b-8196-3885e69806da tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "946dad01-c012-457d-8bfe-6395ff0aaedf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.270s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.730055] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cc6923-5738-e8f1-aae6-db8fe7d064d5, 'name': SearchDatastore_Task, 'duration_secs': 0.009399} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.731412] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-defa0994-52d6-499d-bd8b-9c76dfdb92ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.737385] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1578.737385] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525355ea-869b-4a5d-215a-b6f45cbd8e1e" [ 1578.737385] env[62816]: _type = "Task" [ 1578.737385] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.747027] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525355ea-869b-4a5d-215a-b6f45cbd8e1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.778369] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.778999] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.816673] env[62816]: DEBUG oslo_concurrency.lockutils [req-d14ed142-5879-4952-b02e-c911ff104ea7 req-c8c1b10f-92da-4d8f-925c-1292ea140c81 service nova] Releasing lock "refresh_cache-1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.167854] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.167854] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.168066] env[62816]: INFO nova.compute.manager [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Shelving [ 1579.249913] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525355ea-869b-4a5d-215a-b6f45cbd8e1e, 'name': SearchDatastore_Task, 'duration_secs': 0.020862} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.250237] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.250493] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac/1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1579.250993] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-19ca6fe6-5db0-410d-a516-16597c174c1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.258986] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1579.258986] env[62816]: value = "task-1788483" [ 1579.258986] env[62816]: _type = "Task" [ 1579.258986] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.270372] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788483, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.282022] env[62816]: INFO nova.compute.manager [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Detaching volume ee7bf020-3b58-4597-a084-12bb888e6072 [ 1579.323053] env[62816]: INFO nova.virt.block_device [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Attempting to driver detach volume ee7bf020-3b58-4597-a084-12bb888e6072 from mountpoint /dev/sdb [ 1579.323311] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1579.323502] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371031', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'name': 'volume-ee7bf020-3b58-4597-a084-12bb888e6072', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c3392d3-cfb0-47c6-9366-8c363ad21297', 'attached_at': '', 'detached_at': '', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'serial': 'ee7bf020-3b58-4597-a084-12bb888e6072'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1579.324402] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84085f93-78d8-4750-8624-dd73cd46a1d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.356880] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc8f34f-3671-4304-bc78-2e83b1a561b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.365994] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.366246] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.366414] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.373897] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7989942c-6a0f-4657-8bc7-b6e09ca01d61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.398696] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e886f15-776e-468f-8b37-a883bc58ab85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.415594] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d52701b-3667-42b3-9809-75ea9465d475 
tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] The volume has not been displaced from its original location: [datastore1] volume-ee7bf020-3b58-4597-a084-12bb888e6072/volume-ee7bf020-3b58-4597-a084-12bb888e6072.vmdk. No consolidation needed. {{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1579.421084] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Reconfiguring VM instance instance-0000001d to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1579.424036] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e97a8037-0003-4282-bb84-63fb6fa33504 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.442131] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.442391] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "dd833e38-691c-4757-9c6b-659c74343d3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.446969] env[62816]: DEBUG oslo_vmware.api [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1579.446969] env[62816]: value = "task-1788484" [ 1579.446969] env[62816]: _type = "Task" [ 1579.446969] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.459692] env[62816]: DEBUG oslo_vmware.api [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788484, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.632538] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b587a0-dcf5-4472-83cd-1a53a96ae8c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.641580] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46ea5d8-c877-460e-abba-a61a6509e0d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.675751] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ced7f88-d84d-4dd4-b8d7-a907527e9bc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.681011] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1579.682169] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0db5d97-c24d-46d7-b6e5-d5d1b9c5f1ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.688217] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5bca9a-a3f1-44fe-8aa2-a07fefdaa801 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.693847] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1579.693847] env[62816]: value = "task-1788485" [ 1579.693847] env[62816]: _type = "Task" [ 1579.693847] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.705020] env[62816]: DEBUG nova.compute.provider_tree [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1579.711311] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788485, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.770741] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442918} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.771069] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac/1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1579.771326] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1579.771599] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2adf803-e38b-4761-bd2c-09d20d99e536 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.778342] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1579.778342] env[62816]: value = "task-1788486" [ 1579.778342] env[62816]: _type = "Task" [ 1579.778342] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.788468] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788486, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.944857] env[62816]: DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1579.960822] env[62816]: DEBUG oslo_vmware.api [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788484, 'name': ReconfigVM_Task, 'duration_secs': 0.41277} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.961805] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Reconfigured VM instance instance-0000001d to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1579.966269] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb7c4e0f-9870-4574-9bf9-2f02dfa1f8ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.982514] env[62816]: DEBUG oslo_vmware.api [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1579.982514] env[62816]: value = "task-1788487" [ 1579.982514] env[62816]: _type = "Task" [ 1579.982514] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.991123] env[62816]: DEBUG oslo_vmware.api [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788487, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.110353] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "ee543138-1c43-46c4-a512-1977fa5eb3c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.110656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.110841] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "ee543138-1c43-46c4-a512-1977fa5eb3c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.111034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.111203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d 
tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.113592] env[62816]: INFO nova.compute.manager [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Terminating instance [ 1580.116092] env[62816]: DEBUG nova.compute.manager [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1580.116687] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1580.117148] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e093aae-45c5-410d-8c6c-70eecd283d14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.124915] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1580.125164] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eda28d15-823b-4c67-a9a7-fb6bb56830c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.131770] env[62816]: DEBUG oslo_vmware.api [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1580.131770] env[62816]: value = "task-1788488" [ 1580.131770] env[62816]: _type = "Task" [ 1580.131770] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.139752] env[62816]: DEBUG oslo_vmware.api [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788488, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.203910] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788485, 'name': PowerOffVM_Task, 'duration_secs': 0.289258} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.204272] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1580.205099] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d677cf-edb9-4f5d-9bd7-f0776ad22605 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.208513] env[62816]: DEBUG nova.scheduler.client.report [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1580.228991] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53b5b04-8c86-4c3e-81ba-6fb5f6d7813b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.288034] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788486, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069628} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.288292] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1580.289139] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2195b800-749c-4be2-99f5-3ffa9820f026 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.311356] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac/1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1580.311650] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-347b7b24-fc57-4679-8d3c-86885dec74aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.331975] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1580.331975] env[62816]: value = "task-1788489" [ 1580.331975] env[62816]: _type = "Task" [ 1580.331975] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.342275] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788489, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.419018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.419225] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.419408] env[62816]: DEBUG nova.network.neutron [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1580.474967] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.492117] env[62816]: DEBUG oslo_vmware.api [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788487, 'name': ReconfigVM_Task, 'duration_secs': 0.134971} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.492450] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371031', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'name': 'volume-ee7bf020-3b58-4597-a084-12bb888e6072', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '1c3392d3-cfb0-47c6-9366-8c363ad21297', 'attached_at': '', 'detached_at': '', 'volume_id': 'ee7bf020-3b58-4597-a084-12bb888e6072', 'serial': 'ee7bf020-3b58-4597-a084-12bb888e6072'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1580.641964] env[62816]: DEBUG oslo_vmware.api [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788488, 'name': PowerOffVM_Task, 'duration_secs': 0.226289} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.644048] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1580.644048] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1580.644048] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a5aabab-a308-4c52-ad48-ed621f7223bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.713448] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.560s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.713987] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Start building networks asynchronously for instance. 
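The lockutils entries above record, per lock, how long the caller waited to acquire it and how long it was held (here "compute_resources" held 2.560s by instance_claim). A rough sketch of that waited/held bookkeeping as a context manager around a plain threading.Lock; this only illustrates the pattern and is not the oslo.concurrency implementation, which also supports external file-based locks.

import contextlib
import threading
import time

_locks = {}                      # name -> threading.Lock, process-local only
_registry_lock = threading.Lock()

@contextlib.contextmanager
def timed_lock(name, caller):
    # Mimics the "acquired ... waited Xs" / "released ... held Xs" accounting
    # seen in the lockutils entries above.
    with _registry_lock:
        lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{caller}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - acquired:.3f}s')

# usage: with timed_lock("compute_resources", "instance_claim"): ...
# prints the same two lines as above, with this process's own timings.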
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1580.716820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.619s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.718326] env[62816]: INFO nova.compute.claims [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1580.740124] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1580.740304] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7f20b961-2d9a-4d56-9bc2-756e3ef8f345 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.749024] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1580.749024] env[62816]: value = "task-1788491" [ 1580.749024] env[62816]: _type = "Task" [ 1580.749024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.756711] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788491, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.842409] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788489, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.887456] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1580.887678] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1580.887863] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Deleting the datastore file [datastore1] ee543138-1c43-46c4-a512-1977fa5eb3c6 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1580.888156] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a15ee34-65f5-4300-81e1-40c0fcb53eee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.894958] env[62816]: DEBUG oslo_vmware.api [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1580.894958] env[62816]: value = "task-1788492" [ 1580.894958] env[62816]: _type = "Task" [ 1580.894958] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.902725] env[62816]: DEBUG oslo_vmware.api [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788492, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.036394] env[62816]: DEBUG nova.objects.instance [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1581.206619] env[62816]: DEBUG nova.network.neutron [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.222987] env[62816]: DEBUG nova.compute.utils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.226142] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1581.226142] env[62816]: DEBUG nova.network.neutron [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1581.258908] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788491, 'name': CreateSnapshot_Task} progress is 100%. 
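The instance_info_cache update above stores the Neutron view of the port as a list of VIF dicts (id, network with subnets and ips, devname, ovs_interfaceid, and so on). A small sketch that pulls the fixed IPs and tap device name out of one such entry; the field names and values are trimmed from the entry logged above, everything else is illustrative.

# VIF fields trimmed from the instance_info_cache entry logged above.
vif = {
    "id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364",
    "network": {
        "bridge": "br-int",
        "subnets": [
            {"cidr": "192.168.233.0/24",
             "ips": [{"address": "192.168.233.96", "type": "fixed"}]},
        ],
    },
    "type": "ovs",
    "devname": "tap5b2b9d44-f6",
    "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364",
}

def fixed_ips(vif):
    # Every fixed IP across all subnets of the VIF's network.
    return [ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip.get("type") == "fixed"]

print(fixed_ips(vif), vif["devname"])   # ['192.168.233.96'] tap5b2b9d44-f6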
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.276840] env[62816]: DEBUG nova.policy [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f59998717d8246c5b238194d0d8f5cf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b1b175f09c47457ead5fff6d3ecf1cee', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1581.342913] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788489, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.406533] env[62816]: DEBUG oslo_vmware.api [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788492, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148475} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.406858] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1581.407103] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1581.407327] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1581.407541] env[62816]: INFO nova.compute.manager [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1581.407841] env[62816]: DEBUG oslo.service.loopingcall [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1581.408092] env[62816]: DEBUG nova.compute.manager [-] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1581.408220] env[62816]: DEBUG nova.network.neutron [-] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1581.710424] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.728699] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1581.750315] env[62816]: DEBUG nova.network.neutron [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Successfully created port: a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1581.761899] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788491, 'name': CreateSnapshot_Task, 'duration_secs': 0.521943} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.762261] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1581.763097] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7b53c7-28a9-48e0-8565-a8a9732d9452 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.832737] env[62816]: DEBUG nova.compute.manager [req-70f6ebf7-27c3-4b14-8760-0334ef3fad70 req-3021b705-4e75-43b3-806a-a67db06917cc service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Received event network-vif-deleted-5f921b79-e02e-4aa2-b074-89b96a7890ff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1581.832857] env[62816]: INFO nova.compute.manager [req-70f6ebf7-27c3-4b14-8760-0334ef3fad70 req-3021b705-4e75-43b3-806a-a67db06917cc service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Neutron deleted interface 5f921b79-e02e-4aa2-b074-89b96a7890ff; detaching it from the instance and deleting it from the info cache [ 1581.833043] env[62816]: DEBUG nova.network.neutron [req-70f6ebf7-27c3-4b14-8760-0334ef3fad70 req-3021b705-4e75-43b3-806a-a67db06917cc service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.845657] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788489, 'name': ReconfigVM_Task, 'duration_secs': 1.023535} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.848618] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac/1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1581.849415] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdb2ea69-a0e7-4ba0-b890-a39e79d5b1e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.858526] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1581.858526] env[62816]: value = "task-1788493" [ 1581.858526] env[62816]: _type = "Task" [ 1581.858526] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.866912] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788493, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.982811] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.045523] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d52701b-3667-42b3-9809-75ea9465d475 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.267s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.046578] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.064s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.046764] env[62816]: DEBUG nova.compute.manager [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1582.047776] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd092e05-1a29-402d-93df-d00dff514d44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.056252] env[62816]: DEBUG nova.compute.manager [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1582.056793] env[62816]: DEBUG nova.objects.instance [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1582.156179] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115aadd5-ede9-41dc-9d74-0547517430f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.163743] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7930acf5-1e39-4484-8c78-2738add6419b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.194167] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccaade49-dd37-4e98-8a9f-909ee798d8a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.201175] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e71f5a-e45d-4921-8f3a-b9706d0dbd6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.214164] env[62816]: DEBUG nova.compute.provider_tree [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1582.235702] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c304f1-276d-4394-9d7e-ee0382634b8f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.259018] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fa5b14-585a-4f7a-b53d-1f83e5b29382 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.266523] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1582.295574] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1582.296215] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f4baa220-9699-4289-a5ae-ea1fc13bd786 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.304864] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1582.304864] env[62816]: value = "task-1788494" [ 1582.304864] env[62816]: _type = "Task" [ 1582.304864] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.305138] env[62816]: DEBUG nova.network.neutron [-] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.316026] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788494, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.335716] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1673b558-1183-44fd-9743-ed4d58b16c68 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.345298] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764ba742-4245-4560-ae18-b9bf94dc6ccf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.367202] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788493, 'name': Rename_Task, 'duration_secs': 0.145329} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.380103] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1582.380470] env[62816]: DEBUG nova.compute.manager [req-70f6ebf7-27c3-4b14-8760-0334ef3fad70 req-3021b705-4e75-43b3-806a-a67db06917cc service nova] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Detach interface failed, port_id=5f921b79-e02e-4aa2-b074-89b96a7890ff, reason: Instance ee543138-1c43-46c4-a512-1977fa5eb3c6 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1582.380865] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dee0ae8c-7ed1-4131-b4a4-f0eb5b04cc54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.387490] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1582.387490] env[62816]: value = "task-1788495" [ 1582.387490] env[62816]: _type = "Task" [ 1582.387490] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.398035] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788495, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.562696] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1582.563071] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f12612b-3bce-415f-a3be-7ba26587dae0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.569779] env[62816]: DEBUG oslo_vmware.api [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1582.569779] env[62816]: value = "task-1788496" [ 1582.569779] env[62816]: _type = "Task" [ 1582.569779] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.577829] env[62816]: DEBUG oslo_vmware.api [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788496, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.717726] env[62816]: DEBUG nova.scheduler.client.report [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1582.744254] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Start spawning the instance on the hypervisor. 
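The scheduler report above repeats the provider inventory (VCPU, MEMORY_MB, DISK_GB, each with total, reserved and allocation_ratio). Assuming the usual Placement capacity formula, (total - reserved) * allocation_ratio, those figures work out as in this short sketch; the formula comes from general Placement behaviour, not from this log.

# Inventory figures as logged above for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def schedulable_capacity(inv):
    # Assumed formula: (total - reserved) * allocation_ratio per resource class.
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}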
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1582.773221] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1582.773320] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6bc7626-f179-4f31-8ca9-5bd30578b4a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.777656] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1582.777907] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1582.778098] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1582.778287] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1582.778430] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1582.778575] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1582.778782] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 
tempest-VolumesAdminNegativeTest-1786328548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1582.778939] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1582.779119] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1582.779326] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1582.779443] env[62816]: DEBUG nova.virt.hardware [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1582.780277] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f17088-cfdb-4987-ab9e-8e0bb6d11dcb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.794187] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c2c6ab-e250-4eda-af87-fe64954745a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.797430] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1582.797430] env[62816]: value = "task-1788497" [ 1582.797430] env[62816]: _type = "Task" [ 1582.797430] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.816036] env[62816]: INFO nova.compute.manager [-] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Took 1.41 seconds to deallocate network for instance. [ 1582.816572] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788497, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.824438] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788494, 'name': CloneVM_Task} progress is 94%. 
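The nova.virt.hardware entries above walk from flavor and image limits down to "Got 1 possible topologies ... [VirtCPUTopology(cores=1,sockets=1,threads=1)]" for a 1-vCPU flavor. A toy enumeration of sockets x cores x threads factorizations that reproduces that result; it mirrors the idea only, not nova.virt.hardware's actual constraint handling.

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Enumerate every sockets*cores*threads factorization of the vCPU count
    # that stays within the (here very generous) maxima logged above.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))   # [(1, 1, 1)], matching the entry above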
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.897679] env[62816]: DEBUG oslo_vmware.api [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788495, 'name': PowerOnVM_Task, 'duration_secs': 0.462553} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.899051] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1582.899051] env[62816]: INFO nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Took 9.22 seconds to spawn the instance on the hypervisor. [ 1582.899051] env[62816]: DEBUG nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1582.899413] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b7ac6f-01f2-4db5-b128-e6e10257252b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.080880] env[62816]: DEBUG oslo_vmware.api [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788496, 'name': PowerOffVM_Task, 'duration_secs': 0.175423} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.081181] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1583.081375] env[62816]: DEBUG nova.compute.manager [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1583.082222] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d23a8be-7500-49d0-8a90-acbd7d7d21b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.224341] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.224785] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1583.227365] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.539s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.227588] env[62816]: DEBUG nova.objects.instance [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lazy-loading 'resources' on Instance uuid ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1583.304096] env[62816]: DEBUG nova.network.neutron [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Successfully updated port: a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.308763] env[62816]: DEBUG oslo_vmware.api [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788497, 'name': PowerOnVM_Task, 'duration_secs': 0.369891} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.309208] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1583.309446] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3efcf216-99a6-4de3-9cc0-ccd3219c89a9 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance '9bda24c6-f950-47ff-ad3c-ff745291870c' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1583.322602] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788494, 'name': CloneVM_Task} progress is 95%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.327160] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.420464] env[62816]: INFO nova.compute.manager [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Took 36.56 seconds to build instance. [ 1583.596237] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1abdb725-76bc-47cc-b689-01eb21ce31ff tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.549s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.732137] env[62816]: DEBUG nova.compute.utils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1583.732315] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1583.732495] env[62816]: DEBUG nova.network.neutron [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1583.790702] env[62816]: DEBUG nova.policy [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e0a516271d44d75bf851f2399fdccc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '234d83ee73c7469b83426b99c7c5ed2f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1583.810482] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.810482] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.810738] env[62816]: DEBUG nova.network.neutron [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1583.828459] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788494, 'name': CloneVM_Task, 'duration_secs': 1.343637} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.831551] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Created linked-clone VM from snapshot [ 1583.832497] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b6c116-17f5-4b49-b44d-601f4bf76428 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.842634] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Uploading image 6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1583.867168] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1583.867168] env[62816]: value = "vm-371059" [ 1583.867168] env[62816]: _type = "VirtualMachine" [ 1583.867168] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1583.867446] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-80146bcb-ac18-4220-b0d9-4fcc5853e138 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.876564] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lease: (returnval){ [ 1583.876564] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523d5390-af99-2bc9-9489-4d540c76ec5d" [ 1583.876564] env[62816]: _type = "HttpNfcLease" [ 1583.876564] env[62816]: } obtained for exporting VM: (result){ [ 1583.876564] env[62816]: value = "vm-371059" [ 1583.876564] env[62816]: _type = "VirtualMachine" [ 1583.876564] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1583.876767] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the lease: (returnval){ [ 1583.876767] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523d5390-af99-2bc9-9489-4d540c76ec5d" [ 1583.876767] env[62816]: _type = "HttpNfcLease" [ 1583.876767] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1583.885902] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1583.885902] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523d5390-af99-2bc9-9489-4d540c76ec5d" [ 1583.885902] env[62816]: _type = "HttpNfcLease" [ 1583.885902] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1583.925072] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b01b0632-927b-4000-b254-8f053008b750 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.074s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.990131] env[62816]: DEBUG nova.compute.manager [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Received event network-vif-plugged-a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1583.990131] env[62816]: DEBUG oslo_concurrency.lockutils [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1583.990131] env[62816]: DEBUG oslo_concurrency.lockutils [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1583.990131] env[62816]: DEBUG oslo_concurrency.lockutils [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.990131] env[62816]: DEBUG nova.compute.manager [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] No waiting events found dispatching network-vif-plugged-a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1583.990131] env[62816]: WARNING nova.compute.manager [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Received unexpected event network-vif-plugged-a4838985-0fbb-4554-a869-57339a03546a for instance with vm_state building and task_state spawning. [ 1583.991128] env[62816]: DEBUG nova.compute.manager [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Received event network-changed-a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1583.991458] env[62816]: DEBUG nova.compute.manager [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Refreshing instance network info cache due to event network-changed-a4838985-0fbb-4554-a869-57339a03546a. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1583.991749] env[62816]: DEBUG oslo_concurrency.lockutils [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] Acquiring lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.148136] env[62816]: DEBUG nova.network.neutron [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Successfully created port: 56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1584.187526] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4ec41bc-0f35-4a3d-b8d5-3a7ea1406ec9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.195093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dbcdcc-30eb-4175-999b-fdfe519b9fa2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.225983] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-014f5a84-c0dc-4dd7-9049-784b1cd43c52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.233505] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84726e6d-8820-4093-bada-b3c1eafd8420 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.239242] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1584.251789] env[62816]: DEBUG nova.compute.provider_tree [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.369121] env[62816]: DEBUG nova.network.neutron [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1584.385019] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1584.385019] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523d5390-af99-2bc9-9489-4d540c76ec5d" [ 1584.385019] env[62816]: _type = "HttpNfcLease" [ 1584.385019] env[62816]: } is ready. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1584.385325] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1584.385325] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523d5390-af99-2bc9-9489-4d540c76ec5d" [ 1584.385325] env[62816]: _type = "HttpNfcLease" [ 1584.385325] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1584.386115] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb4bf3-92ad-455e-93b4-8f28c54aa7f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.397602] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b02d7b-9294-d16e-6245-1e429292ed25/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1584.397786] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b02d7b-9294-d16e-6245-1e429292ed25/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1584.512703] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7984621d-4427-475c-b1cc-2de6bc4d8543 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.581614] env[62816]: DEBUG nova.objects.instance [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1584.597494] env[62816]: DEBUG nova.network.neutron [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updating instance_info_cache with network_info: [{"id": "a4838985-0fbb-4554-a869-57339a03546a", "address": "fa:16:3e:5c:33:57", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4838985-0f", "ovs_interfaceid": "a4838985-0fbb-4554-a869-57339a03546a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.758040] env[62816]: DEBUG nova.scheduler.client.report [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1584.896358] env[62816]: DEBUG nova.compute.manager [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1584.897634] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41e4e995-6d04-4201-8ed4-2abc5a2167e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.085198] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.085428] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.085614] env[62816]: DEBUG nova.network.neutron [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1585.085818] env[62816]: DEBUG nova.objects.instance [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'info_cache' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.100815] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 
tempest-VolumesAdminNegativeTest-1786328548-project-member] Releasing lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.101055] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Instance network_info: |[{"id": "a4838985-0fbb-4554-a869-57339a03546a", "address": "fa:16:3e:5c:33:57", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4838985-0f", "ovs_interfaceid": "a4838985-0fbb-4554-a869-57339a03546a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1585.101367] env[62816]: DEBUG oslo_concurrency.lockutils [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] Acquired lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.101541] env[62816]: DEBUG nova.network.neutron [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Refreshing network info cache for port a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1585.103905] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:33:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b6a4065-12af-4fb9-ac47-ec9143f7297e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4838985-0fbb-4554-a869-57339a03546a', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.114301] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Creating folder: Project (b1b175f09c47457ead5fff6d3ecf1cee). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1585.119228] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62c8b17b-ce50-472e-a9ee-e7d622fa08b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.133483] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Created folder: Project (b1b175f09c47457ead5fff6d3ecf1cee) in parent group-v370905. [ 1585.133483] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Creating folder: Instances. Parent ref: group-v371060. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1585.134096] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b5f8868-ad41-4247-ad7f-80c1c35c7def {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.143867] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Created folder: Instances in parent group-v371060. [ 1585.144164] env[62816]: DEBUG oslo.service.loopingcall [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.144388] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1585.144726] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4b6a3e4-1df8-4a43-adef-ac5d91922f5e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.166472] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.166472] env[62816]: value = "task-1788501" [ 1585.166472] env[62816]: _type = "Task" [ 1585.166472] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.176877] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788501, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.263081] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.267543] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1585.273311] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.335s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.274990] env[62816]: INFO nova.compute.claims [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1585.303750] env[62816]: INFO nova.scheduler.client.report [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Deleted allocations for instance ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056 [ 1585.313645] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1585.314079] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1585.314258] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] 
Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1585.314569] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1585.314800] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1585.315115] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1585.315389] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1585.315605] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1585.315781] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1585.316046] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1585.316336] env[62816]: DEBUG nova.virt.hardware [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1585.317985] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8038ad-ec45-4298-be4a-3cb12d34c393 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.327487] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f171802c-7cf7-4bda-a86c-138c218fbd17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.409615] env[62816]: INFO nova.compute.manager [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 
tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] instance snapshotting [ 1585.413597] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998e7f4f-0a00-457d-8e1a-170d5bbf3ada {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.445081] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da0cb97-e03b-4b51-ab55-d1c64858d45f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.516217] env[62816]: DEBUG nova.network.neutron [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updated VIF entry in instance network info cache for port a4838985-0fbb-4554-a869-57339a03546a. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1585.516686] env[62816]: DEBUG nova.network.neutron [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updating instance_info_cache with network_info: [{"id": "a4838985-0fbb-4554-a869-57339a03546a", "address": "fa:16:3e:5c:33:57", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4838985-0f", "ovs_interfaceid": "a4838985-0fbb-4554-a869-57339a03546a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.590694] env[62816]: DEBUG nova.objects.base [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Object Instance<1c3392d3-cfb0-47c6-9366-8c363ad21297> lazy-loaded attributes: flavor,info_cache {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1585.680376] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788501, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.814312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b23350e7-732c-4cf5-b9dd-4ebe31c009bc tempest-ServerDiagnosticsNegativeTest-809779532 tempest-ServerDiagnosticsNegativeTest-809779532-project-member] Lock "ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.656s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.943489] env[62816]: DEBUG nova.network.neutron [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Port 5b2b9d44-f66e-428f-a75c-6e213ebdb364 binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1585.943770] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.943920] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.944147] env[62816]: DEBUG nova.network.neutron [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1585.958554] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1585.959235] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-72b13f0c-6573-4879-97c9-4d99311519e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.967985] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1585.967985] env[62816]: value = "task-1788502" [ 1585.967985] env[62816]: _type = "Task" [ 1585.967985] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.977171] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788502, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.023871] env[62816]: DEBUG oslo_concurrency.lockutils [req-17869e67-fc08-4fa1-aaa0-7f99ff6c1565 req-7ef1d2b7-6d89-4168-8e62-5539c1da6705 service nova] Releasing lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.025713] env[62816]: DEBUG nova.compute.manager [req-74be1a14-c9a0-4db2-8743-9dc36876d0e5 req-9b34a728-a285-4916-bf1f-b4077c15ec44 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Received event network-vif-plugged-56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1586.026362] env[62816]: DEBUG oslo_concurrency.lockutils [req-74be1a14-c9a0-4db2-8743-9dc36876d0e5 req-9b34a728-a285-4916-bf1f-b4077c15ec44 service nova] Acquiring lock "d34b7828-542e-4b66-a923-644d0d0f4866-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.026401] env[62816]: DEBUG oslo_concurrency.lockutils [req-74be1a14-c9a0-4db2-8743-9dc36876d0e5 req-9b34a728-a285-4916-bf1f-b4077c15ec44 service nova] Lock "d34b7828-542e-4b66-a923-644d0d0f4866-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.026570] env[62816]: DEBUG oslo_concurrency.lockutils [req-74be1a14-c9a0-4db2-8743-9dc36876d0e5 req-9b34a728-a285-4916-bf1f-b4077c15ec44 service nova] Lock "d34b7828-542e-4b66-a923-644d0d0f4866-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.026823] env[62816]: DEBUG nova.compute.manager [req-74be1a14-c9a0-4db2-8743-9dc36876d0e5 req-9b34a728-a285-4916-bf1f-b4077c15ec44 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] No waiting events found dispatching network-vif-plugged-56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1586.027132] env[62816]: WARNING nova.compute.manager [req-74be1a14-c9a0-4db2-8743-9dc36876d0e5 req-9b34a728-a285-4916-bf1f-b4077c15ec44 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Received unexpected event network-vif-plugged-56c6faa5-b2b2-42d5-85d9-dd995a578b48 for instance with vm_state building and task_state spawning. [ 1586.178919] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788501, 'name': CreateVM_Task, 'duration_secs': 0.524404} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.179210] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.179948] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.180227] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.180729] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.181185] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd449b01-dc64-4351-b9f6-fa84eb4bb055 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.187473] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1586.187473] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52975cea-d152-8652-a13c-1974b33cd2bb" [ 1586.187473] env[62816]: _type = "Task" [ 1586.187473] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.195956] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52975cea-d152-8652-a13c-1974b33cd2bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.231039] env[62816]: DEBUG nova.network.neutron [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Successfully updated port: 56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1586.444733] env[62816]: DEBUG nova.network.neutron [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [{"id": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "address": "fa:16:3e:d6:81:d5", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1110b9ce-76", "ovs_interfaceid": "1110b9ce-766b-4ab4-b75f-4e0139f78297", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.479177] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788502, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.704109] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52975cea-d152-8652-a13c-1974b33cd2bb, 'name': SearchDatastore_Task, 'duration_secs': 0.016416} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.707099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.707363] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1586.707602] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.707749] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.707937] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1586.708436] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeff4181-856d-47ad-9637-235bc3a0d63b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.716756] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1586.716951] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1586.717792] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d80ab2f8-b58f-4082-8d8e-133c4d57e38a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.723141] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1586.723141] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52bda68f-cff6-777e-787d-7fb21748fb99" [ 1586.723141] env[62816]: _type = "Task" [ 1586.723141] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.734406] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bda68f-cff6-777e-787d-7fb21748fb99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.734406] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.734406] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquired lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.734406] env[62816]: DEBUG nova.network.neutron [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1586.798170] env[62816]: DEBUG nova.network.neutron [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.863518] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fafafef-5f55-41df-9055-d9e1175798ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.871450] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c525cd28-a1db-47dd-82e1-6467a3b73f2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.904535] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4562af99-a8c9-48a3-a8df-4dc7ccc3ff61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.912623] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3948b0f7-78eb-4083-b449-1524a9a4212c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.926379] env[62816]: DEBUG nova.compute.provider_tree [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.951203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "refresh_cache-1c3392d3-cfb0-47c6-9366-8c363ad21297" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.982280] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788502, 'name': CreateSnapshot_Task, 'duration_secs': 0.852848} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.982988] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1586.984289] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f31682-359d-4f63-83ec-ae889cfae9a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.234021] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bda68f-cff6-777e-787d-7fb21748fb99, 'name': SearchDatastore_Task, 'duration_secs': 0.011364} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.234835] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c251848-d7fb-4d99-a520-827f27a0337f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.242991] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1587.242991] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d0fc36-dfd0-0a93-6017-7915e184b610" [ 1587.242991] env[62816]: _type = "Task" [ 1587.242991] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.251630] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d0fc36-dfd0-0a93-6017-7915e184b610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.270555] env[62816]: DEBUG nova.network.neutron [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1587.300452] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.429756] env[62816]: DEBUG nova.scheduler.client.report [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1587.433784] env[62816]: DEBUG nova.network.neutron [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Updating instance_info_cache with network_info: [{"id": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "address": "fa:16:3e:fb:ea:f2", "network": {"id": "4467952d-9ee5-4f34-a2e6-cf130475f861", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-739429624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "234d83ee73c7469b83426b99c7c5ed2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6faa5-b2", "ovs_interfaceid": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.454776] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1587.455158] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-505603ef-a427-4e31-becb-90e6e2d14976 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.463697] env[62816]: DEBUG oslo_vmware.api [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 
tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1587.463697] env[62816]: value = "task-1788503" [ 1587.463697] env[62816]: _type = "Task" [ 1587.463697] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.472362] env[62816]: DEBUG oslo_vmware.api [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788503, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.502824] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1587.503090] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-63ca2587-33bc-465f-b510-009e9848098e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.512986] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1587.512986] env[62816]: value = "task-1788504" [ 1587.512986] env[62816]: _type = "Task" [ 1587.512986] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.521530] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788504, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.754649] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d0fc36-dfd0-0a93-6017-7915e184b610, 'name': SearchDatastore_Task, 'duration_secs': 0.015087} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.755043] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.755322] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 8105e650-8482-40c6-bd7a-b8daea19a0d5/8105e650-8482-40c6-bd7a-b8daea19a0d5.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1587.755622] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b034fcb-efb9-42c8-ab2e-3554c47179af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.765358] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1587.765358] env[62816]: value = "task-1788505" [ 1587.765358] env[62816]: _type = "Task" [ 1587.765358] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.775941] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788505, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.804385] env[62816]: DEBUG nova.compute.manager [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62816) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1587.804663] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.936173] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.663s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.936748] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1587.939689] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Releasing lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.940837] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Instance network_info: |[{"id": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "address": "fa:16:3e:fb:ea:f2", "network": {"id": "4467952d-9ee5-4f34-a2e6-cf130475f861", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-739429624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "234d83ee73c7469b83426b99c7c5ed2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6faa5-b2", "ovs_interfaceid": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1587.940837] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.283s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.940837] env[62816]: DEBUG nova.objects.instance [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lazy-loading 'resources' on Instance uuid 11a4d835-c149-49f0-8e4f-b3f9a7f1afca {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1587.941929] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:ea:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56c6faa5-b2b2-42d5-85d9-dd995a578b48', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1587.951202] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Creating folder: Project (234d83ee73c7469b83426b99c7c5ed2f). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1587.951822] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c7a4669-d2e2-478e-82f5-af6dfe00a10d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.963934] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Created folder: Project (234d83ee73c7469b83426b99c7c5ed2f) in parent group-v370905. [ 1587.964184] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Creating folder: Instances. Parent ref: group-v371065. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1587.965016] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ecb45129-8e51-45da-adf5-6fcf8ae9651b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.980883] env[62816]: DEBUG oslo_vmware.api [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788503, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.982560] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Created folder: Instances in parent group-v371065. [ 1587.982813] env[62816]: DEBUG oslo.service.loopingcall [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1587.983024] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1587.983262] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bc89f3c-141f-4c6e-b847-5565c32bd39b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.006051] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1588.006051] env[62816]: value = "task-1788508" [ 1588.006051] env[62816]: _type = "Task" [ 1588.006051] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.015690] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788508, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.026833] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788504, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.122228] env[62816]: DEBUG nova.compute.manager [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Received event network-changed-56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1588.122228] env[62816]: DEBUG nova.compute.manager [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Refreshing instance network info cache due to event network-changed-56c6faa5-b2b2-42d5-85d9-dd995a578b48. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1588.122304] env[62816]: DEBUG oslo_concurrency.lockutils [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] Acquiring lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.122442] env[62816]: DEBUG oslo_concurrency.lockutils [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] Acquired lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.122609] env[62816]: DEBUG nova.network.neutron [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Refreshing network info cache for port 56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1588.276583] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788505, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.443152] env[62816]: DEBUG nova.compute.utils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1588.444688] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1588.444913] env[62816]: DEBUG nova.network.neutron [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1588.476772] env[62816]: DEBUG oslo_vmware.api [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788503, 'name': PowerOnVM_Task, 'duration_secs': 0.715573} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.477121] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1588.477367] env[62816]: DEBUG nova.compute.manager [None req-1634153d-a4f0-414f-baff-4cd06154f732 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1588.481217] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93daeaa0-c06e-4636-a6e4-ed74da44bf4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.516465] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788508, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.524521] env[62816]: DEBUG nova.policy [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21ed3abad90741799db9f998a15c7787', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f016ab6a03848ba8014647f483f0b92', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1588.529525] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788504, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.777357] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788505, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625627} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.777719] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 8105e650-8482-40c6-bd7a-b8daea19a0d5/8105e650-8482-40c6-bd7a-b8daea19a0d5.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1588.777981] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1588.778261] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d2f50af-a171-4073-9f2e-df93e719fd77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.785350] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1588.785350] env[62816]: value = "task-1788509" [ 1588.785350] env[62816]: _type = "Task" [ 1588.785350] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.794187] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788509, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.927946] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eaa053d-3310-44dc-a7d4-da9c7984fd1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.936216] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973a2655-3a41-4a5e-af62-c44ed3f5cc57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.971211] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1588.974651] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2340ce9-fbf0-43a8-9093-b1fde547901e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.983492] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ece0219-3dd5-40ae-a60d-b2599a22c257 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.001627] env[62816]: DEBUG nova.compute.provider_tree [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.017600] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788508, 'name': CreateVM_Task, 'duration_secs': 0.799384} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.021115] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1589.022143] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.022322] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.022633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1589.026247] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bc22103-4f72-4482-8870-ba90f23598df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.030435] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788504, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.034314] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1589.034314] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527f6c90-5d0c-5a84-434d-a29f2fe617bc" [ 1589.034314] env[62816]: _type = "Task" [ 1589.034314] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.042715] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527f6c90-5d0c-5a84-434d-a29f2fe617bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.230967] env[62816]: DEBUG nova.network.neutron [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Updated VIF entry in instance network info cache for port 56c6faa5-b2b2-42d5-85d9-dd995a578b48. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.231356] env[62816]: DEBUG nova.network.neutron [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Updating instance_info_cache with network_info: [{"id": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "address": "fa:16:3e:fb:ea:f2", "network": {"id": "4467952d-9ee5-4f34-a2e6-cf130475f861", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-739429624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "234d83ee73c7469b83426b99c7c5ed2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6faa5-b2", "ovs_interfaceid": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.275818] env[62816]: DEBUG nova.network.neutron [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Successfully created port: d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1589.296182] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 
tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.156042} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.296463] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1589.297259] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da51546-65cf-47e8-87cc-7795707bae99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.324334] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 8105e650-8482-40c6-bd7a-b8daea19a0d5/8105e650-8482-40c6-bd7a-b8daea19a0d5.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.324666] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d1bd5fd-dcd3-46b7-af78-5d44bdf437c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.344707] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1589.344707] env[62816]: value = "task-1788510" [ 1589.344707] env[62816]: _type = "Task" [ 1589.344707] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.353444] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788510, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.507985] env[62816]: DEBUG nova.scheduler.client.report [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1589.529026] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788504, 'name': CloneVM_Task, 'duration_secs': 1.809378} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.530028] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Created linked-clone VM from snapshot [ 1589.530969] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60475f1c-1438-4929-a93d-6fba5c6f35a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.547193] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Uploading image 3dbb4887-cb05-4553-b496-bc3e99336442 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1589.553821] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527f6c90-5d0c-5a84-434d-a29f2fe617bc, 'name': SearchDatastore_Task, 'duration_secs': 0.017679} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.554531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.554591] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1589.554957] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.555190] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.555480] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1589.555726] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-045b846c-d8a2-48b8-9f6d-121d401ddc58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.566127] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1589.566354] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1589.567138] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d49efa6-a2f0-4909-b1d1-497de78eb42a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.574922] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1589.574922] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d8df98-64ad-6c9f-5a80-e980cdfd6981" [ 1589.574922] env[62816]: _type = "Task" [ 1589.574922] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.580053] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1589.580053] env[62816]: value = "vm-371064" [ 1589.580053] env[62816]: _type = "VirtualMachine" [ 1589.580053] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1589.580565] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1176ef46-c8be-4558-ba40-2866f424eda1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.587472] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d8df98-64ad-6c9f-5a80-e980cdfd6981, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.588976] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease: (returnval){ [ 1589.588976] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52062fe8-d5e0-4a61-9bd2-3eb1d012015c" [ 1589.588976] env[62816]: _type = "HttpNfcLease" [ 1589.588976] env[62816]: } obtained for exporting VM: (result){ [ 1589.588976] env[62816]: value = "vm-371064" [ 1589.588976] env[62816]: _type = "VirtualMachine" [ 1589.588976] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1589.589317] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the lease: (returnval){ [ 1589.589317] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52062fe8-d5e0-4a61-9bd2-3eb1d012015c" [ 1589.589317] env[62816]: _type = "HttpNfcLease" [ 1589.589317] env[62816]: } to be ready. 
{{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1589.595889] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1589.595889] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52062fe8-d5e0-4a61-9bd2-3eb1d012015c" [ 1589.595889] env[62816]: _type = "HttpNfcLease" [ 1589.595889] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1589.733837] env[62816]: DEBUG oslo_concurrency.lockutils [req-ede2ed37-47f3-4e1b-a229-f55e61d599df req-1f3d28b8-a650-4392-be2b-ba4ac59ae6bd service nova] Releasing lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.855192] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788510, 'name': ReconfigVM_Task, 'duration_secs': 0.338512} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.855633] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 8105e650-8482-40c6-bd7a-b8daea19a0d5/8105e650-8482-40c6-bd7a-b8daea19a0d5.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.856352] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5147b4c5-06c9-4899-8a2b-516614159d08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.863598] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1589.863598] env[62816]: value = "task-1788512" [ 1589.863598] env[62816]: _type = "Task" [ 1589.863598] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.872438] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788512, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.984912] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1590.009481] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None 
req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1590.011118] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1590.011483] env[62816]: DEBUG nova.virt.hardware [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1590.012254] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2098f1d0-0709-450d-9d0a-4f304526e169 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.015500] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.075s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.018043] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.949s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.018043] env[62816]: DEBUG nova.objects.instance [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lazy-loading 'resources' on Instance uuid 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1590.025187] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40331bc8-7401-4fa3-8fe3-0a8bc5a54d93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.049286] env[62816]: INFO nova.scheduler.client.report [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Deleted allocations for instance 11a4d835-c149-49f0-8e4f-b3f9a7f1afca [ 1590.088631] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d8df98-64ad-6c9f-5a80-e980cdfd6981, 'name': SearchDatastore_Task, 'duration_secs': 0.015845} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.089666] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ed939df-ac1a-4b3a-a0e6-28baa6382887 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.098459] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1590.098459] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5243c175-edf3-41b9-4dce-fa13c689f277" [ 1590.098459] env[62816]: _type = "Task" [ 1590.098459] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.102766] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1590.102766] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52062fe8-d5e0-4a61-9bd2-3eb1d012015c" [ 1590.102766] env[62816]: _type = "HttpNfcLease" [ 1590.102766] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1590.103555] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1590.103555] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52062fe8-d5e0-4a61-9bd2-3eb1d012015c" [ 1590.103555] env[62816]: _type = "HttpNfcLease" [ 1590.103555] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1590.104363] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aa2173-0701-4169-938b-5274e47b5db8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.116766] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabd92-5662-25ba-16a4-93b7b09e6e8f/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1590.116990] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabd92-5662-25ba-16a4-93b7b09e6e8f/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1590.119682] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5243c175-edf3-41b9-4dce-fa13c689f277, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.228268] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9373576b-eedf-4954-8773-08ed18feca89 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.375021] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788512, 'name': Rename_Task, 'duration_secs': 0.141138} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.375383] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1590.375672] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-707dbbca-dd84-4494-9d3f-14c52e945a8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.382907] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1590.382907] env[62816]: value = "task-1788513" [ 1590.382907] env[62816]: _type = "Task" [ 1590.382907] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.394889] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.556896] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2287d4aa-a4c2-432d-9c59-0e02ddc474cb tempest-SecurityGroupsTestJSON-1587406018 tempest-SecurityGroupsTestJSON-1587406018-project-member] Lock "11a4d835-c149-49f0-8e4f-b3f9a7f1afca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.375s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.614897] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5243c175-edf3-41b9-4dce-fa13c689f277, 'name': SearchDatastore_Task, 'duration_secs': 0.014374} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.615441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.616712] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d34b7828-542e-4b66-a923-644d0d0f4866/d34b7828-542e-4b66-a923-644d0d0f4866.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1590.617610] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95e18f6d-e89f-4608-a6ef-83cc0c6d96d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.627162] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1590.627162] env[62816]: value = "task-1788514" [ 1590.627162] env[62816]: _type = "Task" [ 1590.627162] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.636165] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788514, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.899630] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788513, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.143981] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788514, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.167360] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-802a165b-9079-40f4-8afa-9b3e476d9f45 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.176748] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87218864-b47f-4e63-b20d-cf0d5fb134bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.213685] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab0225c-1100-4c31-a9ce-5dc275a1d8c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.222205] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ead4c5b8-a674-4575-8848-1179b371a98e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.242241] env[62816]: DEBUG nova.compute.provider_tree [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1591.394454] env[62816]: DEBUG oslo_vmware.api [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788513, 'name': PowerOnVM_Task, 'duration_secs': 0.538306} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.394760] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1591.398021] env[62816]: INFO nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Took 8.65 seconds to spawn the instance on the hypervisor. 
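The repeated "Task: {...} progress is N%" / "completed successfully" entries above come from the oslo.vmware task-polling loop: the driver invokes a vCenter method (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, PowerOnVM_Task, ...), gets back a task reference, and polls it until it reports success or error. Below is a minimal, self-contained sketch of that pattern; fetch_task_info and the TaskInfo shape are hypothetical stand-ins for the real PropertyCollector calls, not the actual oslo.vmware implementation.

    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        # Hypothetical mirror of a vSphere TaskInfo object.
        state: str                  # 'queued' | 'running' | 'success' | 'error'
        progress: Optional[int]     # percent complete, may be None
        error: Optional[str] = None

    def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5) -> TaskInfo:
        """Poll a task until it finishes, logging progress like the entries above."""
        while True:
            info = fetch_task_info()
            if info.state in ('queued', 'running'):
                print(f"Task progress is {info.progress or 0}%.")
                time.sleep(poll_interval)
                continue
            if info.state == 'success':
                print("Task completed successfully.")
                return info
            raise RuntimeError(f"Task failed: {info.error}")

The per-entry "duration_secs" values in the log are simply the wall-clock time such a loop spends between submitting the call and seeing the 'success' state.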
[ 1591.398021] env[62816]: DEBUG nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1591.398021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529b5d81-0bcf-43dd-a77a-16b2b8a96a7f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.559153] env[62816]: DEBUG nova.network.neutron [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Successfully updated port: d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1591.639533] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788514, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638912} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.639851] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d34b7828-542e-4b66-a923-644d0d0f4866/d34b7828-542e-4b66-a923-644d0d0f4866.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1591.640264] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1591.640779] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c6845e8d-1d00-418f-9670-c8a339ebaf86 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.653762] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1591.653762] env[62816]: value = "task-1788515" [ 1591.653762] env[62816]: _type = "Task" [ 1591.653762] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.663658] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788515, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.746928] env[62816]: DEBUG nova.scheduler.client.report [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1591.921018] env[62816]: INFO nova.compute.manager [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Took 43.16 seconds to build instance. [ 1591.981616] env[62816]: DEBUG nova.compute.manager [req-bfe35b06-4b4b-4820-aef6-327e7ba34979 req-f6db2c82-7daa-46be-ade7-20f5e95d8ba9 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Received event network-vif-plugged-d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1591.981847] env[62816]: DEBUG oslo_concurrency.lockutils [req-bfe35b06-4b4b-4820-aef6-327e7ba34979 req-f6db2c82-7daa-46be-ade7-20f5e95d8ba9 service nova] Acquiring lock "65e97c6a-5d8f-4241-9095-65a5a6132a69-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.982851] env[62816]: DEBUG oslo_concurrency.lockutils [req-bfe35b06-4b4b-4820-aef6-327e7ba34979 req-f6db2c82-7daa-46be-ade7-20f5e95d8ba9 service nova] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.983369] env[62816]: DEBUG oslo_concurrency.lockutils [req-bfe35b06-4b4b-4820-aef6-327e7ba34979 req-f6db2c82-7daa-46be-ade7-20f5e95d8ba9 service nova] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.983369] env[62816]: DEBUG nova.compute.manager [req-bfe35b06-4b4b-4820-aef6-327e7ba34979 req-f6db2c82-7daa-46be-ade7-20f5e95d8ba9 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] No waiting events found dispatching network-vif-plugged-d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1591.983466] env[62816]: WARNING nova.compute.manager [req-bfe35b06-4b4b-4820-aef6-327e7ba34979 req-f6db2c82-7daa-46be-ade7-20f5e95d8ba9 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Received unexpected event network-vif-plugged-d7d10695-86f5-4fee-b062-9934fa07e003 for instance with vm_state building and task_state spawning. 
[ 1592.052333] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b02d7b-9294-d16e-6245-1e429292ed25/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1592.053674] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576e09e2-a1ad-43cb-96ff-4db3b72fb849 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.060600] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b02d7b-9294-d16e-6245-1e429292ed25/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1592.061114] env[62816]: ERROR oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b02d7b-9294-d16e-6245-1e429292ed25/disk-0.vmdk due to incomplete transfer. [ 1592.061114] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2323d862-2a88-4491-8282-da9f59cad154 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.062898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.063050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.063201] env[62816]: DEBUG nova.network.neutron [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1592.071011] env[62816]: DEBUG oslo_vmware.rw_handles [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52b02d7b-9294-d16e-6245-1e429292ed25/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1592.071011] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Uploaded image 6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1592.072742] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1592.073496] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ed6db214-b726-47da-9389-40d47259e8fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.080578] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1592.080578] env[62816]: value = "task-1788516" [ 1592.080578] env[62816]: _type = "Task" [ 1592.080578] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.089510] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788516, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.164430] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788515, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081699} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.164748] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1592.166170] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c34547-5e24-4201-82fd-35e505df8615 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.191335] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] d34b7828-542e-4b66-a923-644d0d0f4866/d34b7828-542e-4b66-a923-644d0d0f4866.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1592.191675] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7971a18-fa88-454e-a291-e226ebd4c6f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.215708] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1592.215708] env[62816]: value = "task-1788517" [ 1592.215708] env[62816]: _type = "Task" [ 1592.215708] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.225150] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788517, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.256991] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.239s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.260020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.895s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.261679] env[62816]: INFO nova.compute.claims [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1592.300017] env[62816]: INFO nova.scheduler.client.report [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Deleted allocations for instance 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859 [ 1592.423133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-14f0ffab-ead4-40f8-8732-29c599ddeda4 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.677s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.594058] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788516, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.623452] env[62816]: DEBUG nova.network.neutron [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1592.731147] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788517, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.810067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4f0b379-b6d8-4575-a867-e3ecc2e706b5 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "4fd2da5f-2867-4eeb-b7ab-8ffd7b096859" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.939s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.003285] env[62816]: DEBUG nova.network.neutron [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Updating instance_info_cache with network_info: [{"id": "d7d10695-86f5-4fee-b062-9934fa07e003", "address": "fa:16:3e:c2:33:f6", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d10695-86", "ovs_interfaceid": "d7d10695-86f5-4fee-b062-9934fa07e003", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.103411] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788516, 'name': Destroy_Task, 'duration_secs': 0.60872} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.103900] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Destroyed the VM [ 1593.104330] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1593.104602] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9f5b4d82-25e8-4ba7-bb7d-69b04f76be15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.112197] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1593.112197] env[62816]: value = "task-1788518" [ 1593.112197] env[62816]: _type = "Task" [ 1593.112197] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.122349] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788518, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.151353] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "9c246982-b215-46c1-9cd3-63907a515086" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.151689] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "9c246982-b215-46c1-9cd3-63907a515086" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.231067] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788517, 'name': ReconfigVM_Task, 'duration_secs': 0.807889} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.235021] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Reconfigured VM instance instance-00000037 to attach disk [datastore1] d34b7828-542e-4b66-a923-644d0d0f4866/d34b7828-542e-4b66-a923-644d0d0f4866.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1593.235021] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0b31971-ee84-4c7f-ad58-e2838bccf764 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.241627] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1593.241627] env[62816]: value = "task-1788519" [ 1593.241627] env[62816]: _type = "Task" [ 1593.241627] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.253010] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788519, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.452407] env[62816]: DEBUG nova.compute.manager [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Received event network-changed-a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1593.452407] env[62816]: DEBUG nova.compute.manager [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Refreshing instance network info cache due to event network-changed-a4838985-0fbb-4554-a869-57339a03546a. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1593.452407] env[62816]: DEBUG oslo_concurrency.lockutils [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] Acquiring lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.453628] env[62816]: DEBUG oslo_concurrency.lockutils [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] Acquired lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.453628] env[62816]: DEBUG nova.network.neutron [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Refreshing network info cache for port a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1593.512097] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.512916] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Instance network_info: |[{"id": "d7d10695-86f5-4fee-b062-9934fa07e003", "address": "fa:16:3e:c2:33:f6", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d10695-86", "ovs_interfaceid": "d7d10695-86f5-4fee-b062-9934fa07e003", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1593.513645] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:33:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd7d10695-86f5-4fee-b062-9934fa07e003', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1593.522274] env[62816]: DEBUG oslo.service.loopingcall [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1593.523535] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1593.525969] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35231ad5-104e-4348-87b5-7e239e88e3c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.551604] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1593.551604] env[62816]: value = "task-1788520" [ 1593.551604] env[62816]: _type = "Task" [ 1593.551604] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.567632] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788520, 'name': CreateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.624638] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788518, 'name': RemoveSnapshot_Task, 'duration_secs': 0.35419} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.624638] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1593.624638] env[62816]: DEBUG nova.compute.manager [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1593.625261] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8223ebf-15ac-4ca5-a8b5-409c7234b5de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.655981] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1593.752790] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788519, 'name': Rename_Task, 'duration_secs': 0.22369} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.753403] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1593.753891] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3d839fe3-f489-45ce-a692-429d2c4e4d1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.766264] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1593.766264] env[62816]: value = "task-1788521" [ 1593.766264] env[62816]: _type = "Task" [ 1593.766264] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.778082] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788521, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.811138] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00425a13-010e-4c49-80ca-1cd7b909674a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.818562] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad7658f-8eee-4c41-8b97-009c2f4e2fde {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.851886] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeedb0a8-c262-4d6c-a283-e59ad01c226a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.859764] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c122f12-6e1f-4b99-bd9a-c7e48c1c2ac3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.601891] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "e1067d45-1938-4021-b902-21a1aa57058a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.602156] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "e1067d45-1938-4021-b902-21a1aa57058a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.602364] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "e1067d45-1938-4021-b902-21a1aa57058a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.602540] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "e1067d45-1938-4021-b902-21a1aa57058a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.602701] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "e1067d45-1938-4021-b902-21a1aa57058a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.608645] env[62816]: DEBUG 
nova.compute.manager [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Received event network-changed-d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1594.608791] env[62816]: DEBUG nova.compute.manager [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Refreshing instance network info cache due to event network-changed-d7d10695-86f5-4fee-b062-9934fa07e003. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1594.608986] env[62816]: DEBUG oslo_concurrency.lockutils [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] Acquiring lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.609141] env[62816]: DEBUG oslo_concurrency.lockutils [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] Acquired lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.609457] env[62816]: DEBUG nova.network.neutron [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Refreshing network info cache for port d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1594.610438] env[62816]: INFO nova.compute.manager [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Terminating instance [ 1594.611704] env[62816]: INFO nova.compute.manager [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Shelve offloading [ 1594.620094] env[62816]: DEBUG nova.compute.manager [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1594.620701] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1594.620701] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1594.624041] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f5fd57-0cf7-48d6-9136-9722d69a5caa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.626962] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e27a7f4-29b3-40ea-a9ed-ea43f164cf6c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.637672] env[62816]: DEBUG nova.compute.provider_tree [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1594.646333] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788520, 'name': CreateVM_Task, 'duration_secs': 0.599365} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.646578] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788521, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.647716] env[62816]: DEBUG nova.scheduler.client.report [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1594.650995] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1594.655346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.655346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.655465] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1594.655916] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1594.656443] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1594.656443] env[62816]: value = "task-1788522" [ 1594.656443] env[62816]: _type = "Task" [ 1594.656443] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.656633] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7a45c5b-902a-4b2c-9d2c-4fb8811b549f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.658606] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f8bcf40-9f5a-4286-98ba-9a869553a18e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.666904] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1594.666904] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527702be-1ddc-eeb5-78b8-e55222d35be8" [ 1594.666904] env[62816]: _type = "Task" [ 1594.666904] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.676661] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1594.676979] env[62816]: DEBUG nova.compute.manager [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1594.677477] env[62816]: DEBUG oslo_vmware.api [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1594.677477] env[62816]: value = "task-1788523" [ 1594.677477] env[62816]: _type = "Task" [ 1594.677477] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.678665] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10705509-cc30-4f8b-98fe-ce8d6043b6b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.688277] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527702be-1ddc-eeb5-78b8-e55222d35be8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.689297] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.691291] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.691455] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.691626] env[62816]: DEBUG nova.network.neutron [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1594.695591] env[62816]: DEBUG oslo_vmware.api [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.119777] env[62816]: DEBUG oslo_vmware.api [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788521, 'name': PowerOnVM_Task, 'duration_secs': 1.15013} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.120101] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1595.120313] env[62816]: INFO nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Took 9.85 seconds to spawn the instance on the hypervisor. 
[ 1595.120482] env[62816]: DEBUG nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1595.121322] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3029709e-0566-4706-8218-df9390869ae2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.155568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.895s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.156483] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1595.159156] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.541s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.159548] env[62816]: DEBUG nova.objects.instance [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lazy-loading 'resources' on Instance uuid 83f7b5b8-228b-4d17-ab52-8df65fe247e3 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1595.187428] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527702be-1ddc-eeb5-78b8-e55222d35be8, 'name': SearchDatastore_Task, 'duration_secs': 0.020411} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.191634] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.191854] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1595.192727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.192727] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.192727] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1595.192965] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e4216ea-4c98-4d10-8b53-00196d294eb0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.203987] env[62816]: DEBUG oslo_vmware.api [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788523, 'name': PowerOffVM_Task, 'duration_secs': 0.344917} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.203987] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1595.203987] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1595.203987] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-380b4eb5-b02d-433c-b298-95b1e689a781 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.206700] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.206864] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1595.207918] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b0f3bd6-55c8-4ae4-b3bb-516185cf37ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.213275] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1595.213275] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523866da-412d-4530-af76-badc9527bd71" [ 1595.213275] env[62816]: _type = "Task" [ 1595.213275] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.221219] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523866da-412d-4530-af76-badc9527bd71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.227039] env[62816]: DEBUG nova.network.neutron [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Updated VIF entry in instance network info cache for port d7d10695-86f5-4fee-b062-9934fa07e003. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1595.227391] env[62816]: DEBUG nova.network.neutron [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Updating instance_info_cache with network_info: [{"id": "d7d10695-86f5-4fee-b062-9934fa07e003", "address": "fa:16:3e:c2:33:f6", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d10695-86", "ovs_interfaceid": "d7d10695-86f5-4fee-b062-9934fa07e003", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.273660] env[62816]: DEBUG nova.network.neutron [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updated VIF entry in instance network info cache for port a4838985-0fbb-4554-a869-57339a03546a. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1595.273960] env[62816]: DEBUG nova.network.neutron [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updating instance_info_cache with network_info: [{"id": "a4838985-0fbb-4554-a869-57339a03546a", "address": "fa:16:3e:5c:33:57", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4838985-0f", "ovs_interfaceid": "a4838985-0fbb-4554-a869-57339a03546a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.399505] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1595.403123] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1595.403123] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Deleting the datastore file [datastore1] e1067d45-1938-4021-b902-21a1aa57058a {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1595.403123] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca0f3435-cbd1-443e-a478-1ed6bfb151ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.408824] env[62816]: DEBUG oslo_vmware.api [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for the task: (returnval){ [ 1595.408824] env[62816]: value = "task-1788525" [ 1595.408824] env[62816]: _type = "Task" [ 1595.408824] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.422757] env[62816]: DEBUG oslo_vmware.api [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.501585] env[62816]: DEBUG nova.network.neutron [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.638493] env[62816]: INFO nova.compute.manager [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Took 44.56 seconds to build instance. [ 1595.665389] env[62816]: DEBUG nova.compute.utils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1595.666787] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1595.667088] env[62816]: DEBUG nova.network.neutron [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1595.727750] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523866da-412d-4530-af76-badc9527bd71, 'name': SearchDatastore_Task, 'duration_secs': 0.025078} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.728613] env[62816]: DEBUG nova.policy [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a59c608ab954a3ba9cd61a84f30b89f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c54ea5a5abf4f0298b76f6081de8e60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1595.732475] env[62816]: DEBUG oslo_concurrency.lockutils [req-a5c8a4b0-48a0-411b-9a55-8d2664f6a071 req-12e8ccbb-c1b7-4874-844a-5249de7e8f70 service nova] Releasing lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.732475] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8239c72-8b83-4fbd-a368-fc852078e08c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.738635] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1595.738635] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ea79f6-6429-77e8-c421-8f58e95c0804" [ 1595.738635] env[62816]: _type = "Task" [ 1595.738635] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.749893] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ea79f6-6429-77e8-c421-8f58e95c0804, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.776462] env[62816]: DEBUG oslo_concurrency.lockutils [req-a595a2c8-276b-41ba-8943-ffe9a814d9e2 req-1f6e1696-f9be-465c-880c-35a2d186435d service nova] Releasing lock "refresh_cache-8105e650-8482-40c6-bd7a-b8daea19a0d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.926811] env[62816]: DEBUG oslo_vmware.api [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Task: {'id': task-1788525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.350664} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.927112] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1595.927307] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1595.927484] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1595.927657] env[62816]: INFO nova.compute.manager [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1595.927912] env[62816]: DEBUG oslo.service.loopingcall [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.928136] env[62816]: DEBUG nova.compute.manager [-] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1595.928325] env[62816]: DEBUG nova.network.neutron [-] [instance: e1067d45-1938-4021-b902-21a1aa57058a] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1596.004699] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.119066] env[62816]: DEBUG nova.network.neutron [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Successfully created port: c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1596.142023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-74168bf8-0140-4375-a573-440b02768415 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.853s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.163475] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9469365-f01a-47ec-985e-bc084bfff128 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.171138] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1596.174605] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.175428] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.179766] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e182e2-6d8e-4932-aedd-dd48fac3ae82 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.215043] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e564c396-66cc-435b-a6a5-561f6c195ca5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.227786] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626e5295-1806-42ac-944b-4aed477ab2bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.252152] env[62816]: DEBUG nova.compute.provider_tree [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1596.260690] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ea79f6-6429-77e8-c421-8f58e95c0804, 'name': SearchDatastore_Task, 'duration_secs': 0.020977} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.260690] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.260690] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 65e97c6a-5d8f-4241-9095-65a5a6132a69/65e97c6a-5d8f-4241-9095-65a5a6132a69.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1596.260690] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d7fe29d-5901-46ac-a5cb-b8f068d5322e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.267087] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1596.267087] env[62816]: value = "task-1788526" [ 1596.267087] env[62816]: _type = "Task" [ 1596.267087] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.278746] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788526, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.688271] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.688559] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1596.754606] env[62816]: DEBUG nova.scheduler.client.report [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1596.782481] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788526, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.830166] env[62816]: DEBUG nova.compute.manager [req-154c1a2f-e1f1-486c-a2de-78421a2255ca req-8ad201af-4053-41fd-8377-6132bad873d0 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Received event network-vif-deleted-11c87595-6807-405a-ac5b-7099ec0d0bab {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1596.830379] env[62816]: INFO nova.compute.manager [req-154c1a2f-e1f1-486c-a2de-78421a2255ca req-8ad201af-4053-41fd-8377-6132bad873d0 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Neutron deleted interface 11c87595-6807-405a-ac5b-7099ec0d0bab; detaching it from the instance and deleting it from the info cache [ 1596.831503] env[62816]: DEBUG nova.network.neutron [req-154c1a2f-e1f1-486c-a2de-78421a2255ca req-8ad201af-4053-41fd-8377-6132bad873d0 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.987873] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1596.989253] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c66fc7-9ab0-43ec-82ff-bf0d9327868d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.002048] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 
tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1597.002048] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1da1f10b-f842-4ea0-a2d1-37f6154d89c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.047422] env[62816]: DEBUG nova.network.neutron [-] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.160889] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1597.161267] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1597.161587] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleting the datastore file [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1597.165443] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-989b250c-c069-47f7-aae9-1ed8c8b39ebf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.169054] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1597.169054] env[62816]: value = "task-1788528" [ 1597.169054] env[62816]: _type = "Task" [ 1597.169054] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.178016] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788528, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.191933] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1597.234809] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1597.235094] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1597.235315] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1597.235496] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1597.235638] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1597.235806] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1597.236026] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1597.236762] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1597.236762] env[62816]: DEBUG 
nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1597.236762] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1597.236762] env[62816]: DEBUG nova.virt.hardware [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1597.242603] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc35a45-a4a6-4ae6-9fde-14cf2c2f1320 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.252295] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e75bc1a-72f8-478d-80f3-a36f0b6f885d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.267521] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.108s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.270627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.500s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.270876] env[62816]: DEBUG nova.objects.instance [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lazy-loading 'resources' on Instance uuid 049e1f97-ab58-4797-a084-f16a7a58e2cc {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1597.282080] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788526, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.303035] env[62816]: INFO nova.scheduler.client.report [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Deleted allocations for instance 83f7b5b8-228b-4d17-ab52-8df65fe247e3 [ 1597.333643] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01812b5c-05ec-4502-b70a-c989c6cc73f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.345543] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ad879d-f46c-4107-a92c-e3cd3750dd0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.384189] env[62816]: DEBUG nova.compute.manager [req-154c1a2f-e1f1-486c-a2de-78421a2255ca req-8ad201af-4053-41fd-8377-6132bad873d0 service nova] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Detach interface failed, port_id=11c87595-6807-405a-ac5b-7099ec0d0bab, reason: Instance e1067d45-1938-4021-b902-21a1aa57058a could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1597.483633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "d34b7828-542e-4b66-a923-644d0d0f4866" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.483633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.483633] env[62816]: INFO nova.compute.manager [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Rebooting instance [ 1597.550164] env[62816]: INFO nova.compute.manager [-] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Took 1.62 seconds to deallocate network for instance. [ 1597.682536] env[62816]: DEBUG oslo_vmware.api [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788528, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429735} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.682536] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1597.682536] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1597.682536] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1597.699375] env[62816]: INFO nova.scheduler.client.report [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted allocations for instance 679cd9a3-2ed6-451f-b934-ba7738913959 [ 1597.789187] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788526, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.813392] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c8153cd-055d-4a22-b3e2-602a87b8b25d tempest-ServersTestMultiNic-398909768 tempest-ServersTestMultiNic-398909768-project-member] Lock "83f7b5b8-228b-4d17-ab52-8df65fe247e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.038s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.971474] env[62816]: DEBUG nova.compute.manager [req-fcd38dce-fb08-4aea-9700-3b390d159f4b req-c369cb41-90f5-431c-851d-8de592863326 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Received event network-vif-plugged-c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1597.971474] env[62816]: DEBUG oslo_concurrency.lockutils [req-fcd38dce-fb08-4aea-9700-3b390d159f4b req-c369cb41-90f5-431c-851d-8de592863326 service nova] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.971474] env[62816]: DEBUG oslo_concurrency.lockutils [req-fcd38dce-fb08-4aea-9700-3b390d159f4b req-c369cb41-90f5-431c-851d-8de592863326 service nova] Lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.971474] env[62816]: DEBUG oslo_concurrency.lockutils [req-fcd38dce-fb08-4aea-9700-3b390d159f4b 
req-c369cb41-90f5-431c-851d-8de592863326 service nova] Lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.971729] env[62816]: DEBUG nova.compute.manager [req-fcd38dce-fb08-4aea-9700-3b390d159f4b req-c369cb41-90f5-431c-851d-8de592863326 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] No waiting events found dispatching network-vif-plugged-c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1597.971769] env[62816]: WARNING nova.compute.manager [req-fcd38dce-fb08-4aea-9700-3b390d159f4b req-c369cb41-90f5-431c-851d-8de592863326 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Received unexpected event network-vif-plugged-c924d6c0-d5cc-40a9-b561-9393a5f71201 for instance with vm_state building and task_state spawning. [ 1597.973713] env[62816]: DEBUG nova.network.neutron [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Successfully updated port: c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1598.021505] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.021658] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquired lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.021846] env[62816]: DEBUG nova.network.neutron [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1598.057294] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.203348] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4536d8-7ee6-4f28-8fc2-c60ff9afda16 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.207884] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" 
{{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.214759] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c458d6-02ce-4f2b-9d40-cb0d48a91d9b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.247718] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31be4dbb-67cb-4ab6-98b6-48321d73b3e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.256343] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5059ee-7569-43dd-94aa-1e11878fbfe8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.274946] env[62816]: DEBUG nova.compute.provider_tree [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.285145] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788526, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.868393} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.285449] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 65e97c6a-5d8f-4241-9095-65a5a6132a69/65e97c6a-5d8f-4241-9095-65a5a6132a69.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1598.285665] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1598.285919] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cc31f6a2-b059-4d7c-8d08-687b5bdfce50 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.294061] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1598.294061] env[62816]: value = "task-1788529" [ 1598.294061] env[62816]: _type = "Task" [ 1598.294061] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.304609] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788529, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.477522] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.477732] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.477894] env[62816]: DEBUG nova.network.neutron [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1598.608808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.609244] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.781189] env[62816]: DEBUG nova.scheduler.client.report [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1598.813620] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788529, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072641} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.813915] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1598.814752] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97905727-68e3-414b-81c0-bf14a474e842 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.845803] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 65e97c6a-5d8f-4241-9095-65a5a6132a69/65e97c6a-5d8f-4241-9095-65a5a6132a69.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.849258] env[62816]: DEBUG nova.network.neutron [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Updating instance_info_cache with network_info: [{"id": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "address": "fa:16:3e:fb:ea:f2", "network": {"id": "4467952d-9ee5-4f34-a2e6-cf130475f861", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-739429624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "234d83ee73c7469b83426b99c7c5ed2f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6faa5-b2", "ovs_interfaceid": "56c6faa5-b2b2-42d5-85d9-dd995a578b48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1598.850699] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58085ce1-7e2d-47d9-b566-b904266ad48b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.872956] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1598.872956] env[62816]: value = "task-1788530" [ 1598.872956] env[62816]: _type = "Task" [ 1598.872956] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.882786] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788530, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.994124] env[62816]: DEBUG nova.compute.manager [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-vif-unplugged-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1598.994124] env[62816]: DEBUG oslo_concurrency.lockutils [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.995262] env[62816]: DEBUG oslo_concurrency.lockutils [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.995262] env[62816]: DEBUG oslo_concurrency.lockutils [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.995262] env[62816]: DEBUG nova.compute.manager [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] No waiting events found dispatching network-vif-unplugged-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1598.995262] env[62816]: WARNING nova.compute.manager [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received unexpected event network-vif-unplugged-f2f2e184-1921-455c-b435-44548769245c for instance with vm_state shelved_offloaded and task_state None. [ 1598.995262] env[62816]: DEBUG nova.compute.manager [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-changed-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1599.002179] env[62816]: DEBUG nova.compute.manager [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Refreshing instance network info cache due to event network-changed-f2f2e184-1921-455c-b435-44548769245c. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1599.002430] env[62816]: DEBUG oslo_concurrency.lockutils [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.002665] env[62816]: DEBUG oslo_concurrency.lockutils [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.002740] env[62816]: DEBUG nova.network.neutron [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Refreshing network info cache for port f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1599.024411] env[62816]: DEBUG nova.network.neutron [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1599.113478] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1599.227388] env[62816]: DEBUG nova.network.neutron [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.287772] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b 
tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.016s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.289977] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.980s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.290203] env[62816]: DEBUG nova.objects.instance [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1599.314186] env[62816]: INFO nova.scheduler.client.report [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Deleted allocations for instance 049e1f97-ab58-4797-a084-f16a7a58e2cc [ 1599.367628] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Releasing lock "refresh_cache-d34b7828-542e-4b66-a923-644d0d0f4866" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.373354] env[62816]: DEBUG nova.compute.manager [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1599.374527] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2162cf95-090c-4c48-b831-726c6eeac985 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.385852] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788530, 'name': ReconfigVM_Task, 'duration_secs': 0.323761} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.389656] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 65e97c6a-5d8f-4241-9095-65a5a6132a69/65e97c6a-5d8f-4241-9095-65a5a6132a69.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1599.392634] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2e7b12c-c1ad-4e56-afdd-5a9fb61e64c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.399755] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1599.399755] env[62816]: value = "task-1788531" [ 1599.399755] env[62816]: _type = "Task" [ 1599.399755] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.409153] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788531, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.631330] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1599.727767] env[62816]: DEBUG nova.network.neutron [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updated VIF entry in instance network info cache for port f2f2e184-1921-455c-b435-44548769245c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1599.728395] env[62816]: DEBUG nova.network.neutron [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf2f2e184-19", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.732925] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.732925] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Instance network_info: |[{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1599.732925] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 
9745413b-2bd8-45d7-8491-483e4921b59c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:78:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c924d6c0-d5cc-40a9-b561-9393a5f71201', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1599.739280] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating folder: Project (8c54ea5a5abf4f0298b76f6081de8e60). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1599.739966] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f2786fc-590c-4c8e-9baf-020f485d013d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.751372] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created folder: Project (8c54ea5a5abf4f0298b76f6081de8e60) in parent group-v370905. [ 1599.751558] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating folder: Instances. Parent ref: group-v371069. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1599.751774] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7462a78d-d13a-423c-9d99-a0e024af90f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.760570] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created folder: Instances in parent group-v371069. [ 1599.760800] env[62816]: DEBUG oslo.service.loopingcall [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1599.760984] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1599.761192] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-076aac07-9b0e-444e-bb51-e9b0b2a0846d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.780621] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1599.780621] env[62816]: value = "task-1788534" [ 1599.780621] env[62816]: _type = "Task" [ 1599.780621] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.789033] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788534, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.823169] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b21db3e6-1b60-4f17-ac2c-dae0186bba2b tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "049e1f97-ab58-4797-a084-f16a7a58e2cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.836s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.909284] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788531, 'name': Rename_Task, 'duration_secs': 0.145659} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.909555] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1599.909804] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8a47d17-5472-4f36-8d92-e7d93e69b6e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.916132] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1599.916132] env[62816]: value = "task-1788535" [ 1599.916132] env[62816]: _type = "Task" [ 1599.916132] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.924597] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788535, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.999187] env[62816]: DEBUG nova.compute.manager [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Received event network-changed-c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1599.999187] env[62816]: DEBUG nova.compute.manager [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Refreshing instance network info cache due to event network-changed-c924d6c0-d5cc-40a9-b561-9393a5f71201. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1599.999187] env[62816]: DEBUG oslo_concurrency.lockutils [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.999187] env[62816]: DEBUG oslo_concurrency.lockutils [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.999187] env[62816]: DEBUG nova.network.neutron [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Refreshing network info cache for port c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.232955] env[62816]: DEBUG oslo_concurrency.lockutils [req-facf8d72-8824-4d2e-aae4-d25ccda602a9 req-38a7ea2c-ba8c-4976-8bad-0634e94abe20 service nova] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1600.295096] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788534, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.299802] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b536a4d-ffa6-4025-8e55-d8c7f7527dfb tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.301917] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.972s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.303284] env[62816]: INFO nova.compute.claims [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1600.403024] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b4381a0-d9d7-46d4-b89a-4013afadde64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.412425] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Doing hard reboot of VM {{(pid=62816) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1600.412425] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task 
with opID=oslo.vmware-3c17cacc-013d-43c4-8394-e860ee40a2ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.418375] env[62816]: DEBUG oslo_vmware.api [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1600.418375] env[62816]: value = "task-1788536" [ 1600.418375] env[62816]: _type = "Task" [ 1600.418375] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.432551] env[62816]: DEBUG oslo_vmware.api [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788536, 'name': ResetVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.436450] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788535, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.559832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.652924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.652924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.652924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.652924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.652924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.653782] env[62816]: INFO nova.compute.manager [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Terminating instance [ 1600.656866] env[62816]: DEBUG nova.compute.manager [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1600.657259] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1600.658188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64defc5-8053-44d9-abdc-143dacdc6f25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.667522] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1600.668032] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf58f96e-7cfb-4df6-bf70-2467b245f19a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.676020] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1600.676020] env[62816]: value = "task-1788537" [ 1600.676020] env[62816]: _type = "Task" [ 1600.676020] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.685206] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788537, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.795932] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788534, 'name': CreateVM_Task, 'duration_secs': 0.601243} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.796279] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1600.799701] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.799909] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.800280] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1600.800607] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85df1b07-3488-4802-90aa-702c0ed6c305 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.806461] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1600.806461] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52dd68e7-442b-f6bc-cec2-97713ca536e1" [ 1600.806461] env[62816]: _type = "Task" [ 1600.806461] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.818276] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd68e7-442b-f6bc-cec2-97713ca536e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.819304] env[62816]: DEBUG nova.network.neutron [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updated VIF entry in instance network info cache for port c924d6c0-d5cc-40a9-b561-9393a5f71201. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1600.819781] env[62816]: DEBUG nova.network.neutron [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.929486] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788535, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.937791] env[62816]: DEBUG oslo_vmware.api [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788536, 'name': ResetVM_Task, 'duration_secs': 0.144658} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.937791] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Did hard reboot of VM {{(pid=62816) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1600.937791] env[62816]: DEBUG nova.compute.manager [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1600.937791] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daffdb60-8990-4571-a6c0-9d38957b95ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.046347] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabd92-5662-25ba-16a4-93b7b09e6e8f/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1601.047409] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b106397-0e8d-42d8-af69-3b164809f74d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.053963] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabd92-5662-25ba-16a4-93b7b09e6e8f/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1601.054162] env[62816]: ERROR oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabd92-5662-25ba-16a4-93b7b09e6e8f/disk-0.vmdk due to incomplete transfer. [ 1601.054393] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ed8e369c-d0eb-4f28-abb6-bb2009c39b69 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.061479] env[62816]: DEBUG oslo_vmware.rw_handles [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52fabd92-5662-25ba-16a4-93b7b09e6e8f/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1601.061689] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Uploaded image 3dbb4887-cb05-4553-b496-bc3e99336442 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1601.063453] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1601.063698] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-517e48d5-ef6b-42de-bf9e-493683801157 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.070636] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1601.070636] env[62816]: value = "task-1788538" [ 1601.070636] env[62816]: _type = "Task" [ 1601.070636] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.079943] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788538, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.184364] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788537, 'name': PowerOffVM_Task, 'duration_secs': 0.344529} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.185026] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1601.185026] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1601.185252] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d7d61ee-7967-422e-a682-194b5b4bf73e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.321039] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd68e7-442b-f6bc-cec2-97713ca536e1, 'name': SearchDatastore_Task, 'duration_secs': 0.013413} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.321200] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.321456] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1601.321633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1601.321716] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1601.321888] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1601.322400] env[62816]: DEBUG oslo_concurrency.lockutils [req-815b48cc-4cc6-4a28-a3e2-1ac12cae4bd9 req-01e1acb4-2fc1-46ff-b6cb-dca425f39983 service nova] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1601.322704] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a48344a9-eb6d-4add-ba18-35e8b8a62386 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.333304] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1601.333499] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1601.334228] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d0cb805-c8aa-4fe6-800b-376273ac4e80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.339706] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1601.339706] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526de9c2-e660-66d5-7335-a85b9fd83777" [ 1601.339706] env[62816]: _type = "Task" [ 1601.339706] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.349871] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526de9c2-e660-66d5-7335-a85b9fd83777, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.366883] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1601.367170] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1601.367364] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Deleting the datastore file [datastore1] 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1601.367633] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82ad7073-d44b-47a8-8860-cd108e5798a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.378148] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for the task: (returnval){ [ 1601.378148] env[62816]: value = "task-1788540" [ 1601.378148] env[62816]: _type = "Task" [ 1601.378148] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.390927] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.430370] env[62816]: DEBUG oslo_vmware.api [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788535, 'name': PowerOnVM_Task, 'duration_secs': 1.106394} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.430653] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1601.430856] env[62816]: INFO nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Took 11.45 seconds to spawn the instance on the hypervisor. 
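The spawn recorded above (task-1788535, PowerOnVM_Task) only finishes after the repeated wait_for_task / _poll_task entries report success. As a reading aid, here is a minimal, simplified sketch of that polling pattern; it is not the oslo.vmware implementation, and session.get_task_info() is a hypothetical stand-in for the PropertyCollector query the real driver issues to read task state and progress.

    # Simplified sketch of the wait_for_task/_poll_task loop seen in this log:
    # submit a vCenter task (e.g. PowerOnVM_Task), then poll until it reports
    # success or error, logging intermediate progress percentages.
    import time

    def wait_for_task(session, task_ref, poll_interval=0.5):
        """Poll a vCenter task reference until it completes or fails."""
        while True:
            # Hypothetical helper; assumed to return an object with
            # .state, .progress, .result and .error attributes.
            info = session.get_task_info(task_ref)
            if info.state == "success":
                return info.result
            if info.state == "error":
                raise RuntimeError(f"Task {task_ref} failed: {info.error}")
            print(f"Task: {task_ref} progress is {info.progress}%")
            time.sleep(poll_interval)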
[ 1601.431055] env[62816]: DEBUG nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1601.431826] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a016ba-a961-46bd-ae45-c8f60def93ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.451178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8db04051-6e5b-48e4-bf00-1e58c01e59c4 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.968s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.582497] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788538, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.706530] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4485f2-2e1e-4af6-b6ac-65c11ade9e8f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.716536] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb2e211-3064-422d-8c46-ba260c670187 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.751080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a94709-4e49-4343-b2d2-7e93fa9822c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.759946] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da9f4be-2e59-4647-aca9-eac9f55468e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.773995] env[62816]: DEBUG nova.compute.provider_tree [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1601.850697] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526de9c2-e660-66d5-7335-a85b9fd83777, 'name': SearchDatastore_Task, 'duration_secs': 0.029901} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.851517] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-272757b9-139b-4b0a-807e-c8c65e7d04b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.856914] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1601.856914] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c99ead-f2d2-119e-ebac-29b89ee85fc9" [ 1601.856914] env[62816]: _type = "Task" [ 1601.856914] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.865085] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c99ead-f2d2-119e-ebac-29b89ee85fc9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.887284] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.954089] env[62816]: INFO nova.compute.manager [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Took 48.05 seconds to build instance. [ 1602.082655] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788538, 'name': Destroy_Task, 'duration_secs': 1.010702} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.083660] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Destroyed the VM [ 1602.085396] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1602.085679] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-e116406b-0eb9-4a68-aef3-961fd6e5c0ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.091887] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1602.091887] env[62816]: value = "task-1788541" [ 1602.091887] env[62816]: _type = "Task" [ 1602.091887] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.099533] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788541, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.246629] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "c4117422-edd4-49a0-882c-2d8ae39b344d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.246858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.252639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.253270] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.253270] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2061}} [ 1602.277252] env[62816]: DEBUG nova.scheduler.client.report [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1602.369660] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c99ead-f2d2-119e-ebac-29b89ee85fc9, 'name': SearchDatastore_Task, 'duration_secs': 0.036137} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.370155] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.370311] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c/9745413b-2bd8-45d7-8491-483e4921b59c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1602.370569] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab005e02-0df4-430a-9add-a5edaa68b1f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.377469] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1602.377469] env[62816]: value = "task-1788542" [ 1602.377469] env[62816]: _type = "Task" [ 1602.377469] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.391993] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788542, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.395398] env[62816]: DEBUG oslo_vmware.api [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Task: {'id': task-1788540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.667246} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.395697] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.395888] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.396085] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.396266] env[62816]: INFO nova.compute.manager [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Took 1.74 seconds to destroy the instance on the hypervisor. [ 1602.396517] env[62816]: DEBUG oslo.service.loopingcall [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.396710] env[62816]: DEBUG nova.compute.manager [-] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1602.396802] env[62816]: DEBUG nova.network.neutron [-] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1602.425746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Acquiring lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.426186] env[62816]: DEBUG oslo_concurrency.lockutils [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Acquired lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.426186] env[62816]: DEBUG nova.network.neutron [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1602.456760] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6902f7ad-b1d8-4a64-9f09-63a32d68f6bd tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.581s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.493375] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "d34b7828-542e-4b66-a923-644d0d0f4866" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.493655] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.493868] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "d34b7828-542e-4b66-a923-644d0d0f4866-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.494056] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.494284] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.497014] env[62816]: INFO nova.compute.manager [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Terminating instance [ 1602.498871] env[62816]: DEBUG nova.compute.manager [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1602.499082] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.499891] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d13883-e06a-4c49-a12b-f7d4e831ae09 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.512581] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1602.512878] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f4b00b9-9905-4372-a423-c056810c0650 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.520638] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1602.520638] env[62816]: value = "task-1788543" [ 1602.520638] env[62816]: _type = "Task" [ 1602.520638] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.529104] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788543, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.606066] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788541, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.749799] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1602.782447] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.481s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.783083] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1602.786158] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.094s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.787722] env[62816]: INFO nova.compute.claims [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1602.892285] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788542, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.993346] env[62816]: DEBUG nova.compute.manager [req-fa0016e2-942b-4604-9ca4-637c754867c5 req-3116d3f7-8ef3-4470-9af1-4aa3b896682f service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Received event network-vif-deleted-c9c1cb74-1895-4673-9834-96675448ee76 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1602.993555] env[62816]: INFO nova.compute.manager [req-fa0016e2-942b-4604-9ca4-637c754867c5 req-3116d3f7-8ef3-4470-9af1-4aa3b896682f service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Neutron deleted interface c9c1cb74-1895-4673-9834-96675448ee76; detaching it from the instance and deleting it from the info cache [ 1602.994279] env[62816]: DEBUG nova.network.neutron [req-fa0016e2-942b-4604-9ca4-637c754867c5 req-3116d3f7-8ef3-4470-9af1-4aa3b896682f service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.034492] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788543, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.102746] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788541, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.274074] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.293481] env[62816]: DEBUG nova.compute.utils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1603.298192] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1603.298387] env[62816]: DEBUG nova.network.neutron [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1603.370896] env[62816]: DEBUG nova.network.neutron [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Updating instance_info_cache with network_info: [{"id": "d7d10695-86f5-4fee-b062-9934fa07e003", "address": "fa:16:3e:c2:33:f6", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d10695-86", "ovs_interfaceid": "d7d10695-86f5-4fee-b062-9934fa07e003", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.389021] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788542, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565975} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.389021] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c/9745413b-2bd8-45d7-8491-483e4921b59c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1603.389021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1603.389221] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ebb4ab2-8a33-4014-b1b8-47c29bad82aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.394966] env[62816]: DEBUG nova.policy [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b50072f58a3405e897cb98c269069ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47262466d7ba483aafb4f819349f0be3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1603.397705] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1603.397705] env[62816]: value = "task-1788544" [ 1603.397705] env[62816]: _type = "Task" [ 1603.397705] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.408773] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788544, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.421641] env[62816]: DEBUG nova.network.neutron [-] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.496539] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0adbc254-2734-430e-8614-7cc90b7cf1ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.506875] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a06bfe1-d0c7-442b-8042-5ab963376841 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.549797] env[62816]: DEBUG nova.compute.manager [req-fa0016e2-942b-4604-9ca4-637c754867c5 req-3116d3f7-8ef3-4470-9af1-4aa3b896682f service nova] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Detach interface failed, port_id=c9c1cb74-1895-4673-9834-96675448ee76, reason: Instance 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1603.554883] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788543, 'name': PowerOffVM_Task, 'duration_secs': 0.585171} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.555298] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1603.559026] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1603.559026] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a1d0bdf0-a4e3-4892-a0f4-d6c0204a288e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.603765] env[62816]: DEBUG oslo_vmware.api [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788541, 'name': RemoveSnapshot_Task, 'duration_secs': 1.012816} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.605452] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1603.605452] env[62816]: INFO nova.compute.manager [None req-cf1c8d72-a530-427e-8013-de3ff49a6131 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Took 18.19 seconds to snapshot the instance on the hypervisor. [ 1603.635663] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1603.635868] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1603.636398] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Deleting the datastore file [datastore1] d34b7828-542e-4b66-a923-644d0d0f4866 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1603.636398] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f736ae31-cd18-4bfa-9ed8-37c91f47b962 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.643015] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for the task: (returnval){ [ 1603.643015] env[62816]: value = "task-1788546" [ 1603.643015] env[62816]: _type = "Task" [ 1603.643015] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.653899] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788546, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.681253] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": null, "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf2f2e184-19", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.801265] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1603.868132] env[62816]: DEBUG nova.network.neutron [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Successfully created port: 3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1603.872253] env[62816]: DEBUG oslo_concurrency.lockutils [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Releasing lock "refresh_cache-65e97c6a-5d8f-4241-9095-65a5a6132a69" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.872437] env[62816]: DEBUG nova.compute.manager [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Inject network info {{(pid=62816) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1603.872735] env[62816]: DEBUG nova.compute.manager [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] network_info to inject: |[{"id": "d7d10695-86f5-4fee-b062-9934fa07e003", "address": "fa:16:3e:c2:33:f6", "network": {"id": "75e5dc4f-5bb5-40dc-9704-cea8380a89e9", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-218676624-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f016ab6a03848ba8014647f483f0b92", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37434b93-dfdc-4a3f-bf5a-9f2cbe25a754", "external-id": "nsx-vlan-transportzone-676", "segmentation_id": 676, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd7d10695-86", "ovs_interfaceid": "d7d10695-86f5-4fee-b062-9934fa07e003", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1603.878898] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Reconfiguring VM instance to set the machine id {{(pid=62816) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1603.884953] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10801d33-e64a-4d6e-b9d4-88364be26fc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.905848] env[62816]: DEBUG oslo_vmware.api [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Waiting for the task: (returnval){ [ 1603.905848] env[62816]: value = "task-1788547" [ 1603.905848] env[62816]: _type = "Task" [ 1603.905848] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.913036] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788544, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070495} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.913270] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1603.917262] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f406cc8e-4726-4e71-b1df-d0519a0f0b46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.926096] env[62816]: DEBUG oslo_vmware.api [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Task: {'id': task-1788547, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.926096] env[62816]: INFO nova.compute.manager [-] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Took 1.53 seconds to deallocate network for instance. 
[ 1603.946264] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c/9745413b-2bd8-45d7-8491-483e4921b59c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1603.954022] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6ee0c1c-bc9e-4015-a258-1fe017558e9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.974692] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1603.974692] env[62816]: value = "task-1788548" [ 1603.974692] env[62816]: _type = "Task" [ 1603.974692] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.984395] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788548, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.157781] env[62816]: DEBUG oslo_vmware.api [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Task: {'id': task-1788546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.273472} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.158733] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1604.158733] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1604.158922] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1604.159138] env[62816]: INFO nova.compute.manager [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Took 1.66 seconds to destroy the instance on the hypervisor. 
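The lock bookkeeping lines in these entries ("Acquiring lock ... by ...", "acquired ... waited 0.000s", "released ... held N s") come from oslo.concurrency's lockutils wrappers. A minimal sketch of the same pattern, with illustrative lock names and function bodies:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the in-process "compute_resources" lock held, serializing
    # concurrent claims the way the resource-tracker entries above do.
    print('claiming resources for %s' % instance_uuid)

def refresh_cache(instance_uuid):
    # Locks can also be taken ad hoc, e.g. around a network info cache refresh.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        print('refreshing info cache for %s' % instance_uuid)

The waited/held durations logged above are measured by the same wrapper, which is why the long contention on "compute_resources" (for example the 27.094s wait before the instance_claim earlier in this section) shows up directly in the acquire line.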
[ 1604.159477] env[62816]: DEBUG oslo.service.loopingcall [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.159673] env[62816]: DEBUG nova.compute.manager [-] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1604.159767] env[62816]: DEBUG nova.network.neutron [-] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1604.183541] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.183751] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1604.184029] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.184174] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.186925] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.187368] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.188473] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.188473] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.188473] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1604.188473] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.355996] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c0813b-4d1a-493e-823c-2225b119d732 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.365819] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987cb144-8178-4b1b-8b61-e20c54db23ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.400998] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21fe6d5-8dd2-483b-9084-1017c19ca0bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.412938] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e97268-6ac9-422d-b639-730c97b965f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.422785] env[62816]: DEBUG oslo_vmware.api [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Task: {'id': task-1788547, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.430707] env[62816]: DEBUG nova.compute.provider_tree [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.469886] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.486184] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788548, 'name': ReconfigVM_Task, 'duration_secs': 0.306996} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.486548] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c/9745413b-2bd8-45d7-8491-483e4921b59c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1604.487708] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f12f8d3b-d2e2-47c6-8e9c-62d302cae156 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.497849] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1604.497849] env[62816]: value = "task-1788549" [ 1604.497849] env[62816]: _type = "Task" [ 1604.497849] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.507297] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788549, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.525695] env[62816]: DEBUG nova.compute.manager [req-92b1cc42-f720-4830-8177-147f252596ab req-925a7b78-2968-4660-a4b2-f57c0457b624 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Received event network-vif-deleted-56c6faa5-b2b2-42d5-85d9-dd995a578b48 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.525912] env[62816]: INFO nova.compute.manager [req-92b1cc42-f720-4830-8177-147f252596ab req-925a7b78-2968-4660-a4b2-f57c0457b624 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Neutron deleted interface 56c6faa5-b2b2-42d5-85d9-dd995a578b48; detaching it from the instance and deleting it from the info cache [ 1604.526130] env[62816]: DEBUG nova.network.neutron [req-92b1cc42-f720-4830-8177-147f252596ab req-925a7b78-2968-4660-a4b2-f57c0457b624 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.690929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.813613] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1604.835086] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1604.835478] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1604.835619] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1604.835809] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1604.836068] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1604.836128] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1604.836336] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1604.836517] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1604.836709] env[62816]: DEBUG 
nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1604.836910] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1604.837106] env[62816]: DEBUG nova.virt.hardware [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1604.838193] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42aca33c-99d9-441b-9bb1-758402397a30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.846960] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f9405db-4555-4a94-bc18-12563d182420 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.919136] env[62816]: DEBUG oslo_vmware.api [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Task: {'id': task-1788547, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.934202] env[62816]: DEBUG nova.scheduler.client.report [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1604.983077] env[62816]: DEBUG nova.network.neutron [-] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1605.010070] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788549, 'name': Rename_Task, 'duration_secs': 0.156637} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.010070] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1605.010070] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f01d801f-197d-4348-aa15-ab99f9c168f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.013863] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1605.013863] env[62816]: value = "task-1788550" [ 1605.013863] env[62816]: _type = "Task" [ 1605.013863] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.020983] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.029136] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fea99b76-83bd-45e9-b7c3-48061fcfd873 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.037784] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc76b35a-fc75-4865-b7fd-e7c5e1938ece {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.072717] env[62816]: DEBUG nova.compute.manager [req-92b1cc42-f720-4830-8177-147f252596ab req-925a7b78-2968-4660-a4b2-f57c0457b624 service nova] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Detach interface failed, port_id=56c6faa5-b2b2-42d5-85d9-dd995a578b48, reason: Instance d34b7828-542e-4b66-a923-644d0d0f4866 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1605.318560] env[62816]: INFO nova.compute.manager [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Rebuilding instance [ 1605.366202] env[62816]: DEBUG nova.compute.manager [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1605.368844] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3153ec9c-4a6a-4721-82f3-7733ab83e7a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.422143] env[62816]: DEBUG oslo_vmware.api [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] Task: {'id': task-1788547, 'name': ReconfigVM_Task, 'duration_secs': 1.211767} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.422524] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-26f795f5-6229-4482-b2cc-7ff272013955 tempest-ServersAdminTestJSON-1630693862 tempest-ServersAdminTestJSON-1630693862-project-admin] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Reconfigured VM instance to set the machine id {{(pid=62816) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1605.440702] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.444023] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1605.444023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.529s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.446415] env[62816]: INFO nova.compute.claims [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1605.487878] env[62816]: INFO nova.compute.manager [-] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Took 1.33 seconds to deallocate network for instance. 
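The req-92b1cc42 entries above show the external-event path for a port that Neutron deleted while the instance itself was being torn down: the service tries to detach the interface, tolerates the instance already being gone, and leaves the info cache empty. A rough sketch of that behaviour with made-up helper names (this is not Nova's actual handler):

class InstanceNotFound(Exception):
    pass

def handle_network_vif_deleted(instance_uuid, port_id, driver, info_cache):
    try:
        driver.detach_interface(instance_uuid, port_id)  # assumed driver call
    except InstanceNotFound:
        # Matches "Detach interface failed ... Instance ... could not be found":
        # the instance was destroyed first, so there is nothing left to detach.
        print('Detach interface failed, port_id=%s: instance already gone' % port_id)
    # Either way the cached network_info for the instance ends up empty.
    info_cache[instance_uuid] = []

This is why "Updating instance_info_cache with network_info: []" appears for both 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf and d34b7828-542e-4b66-a923-644d0d0f4866 shortly before the corresponding "Took N seconds to deallocate network" lines.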
[ 1605.530648] env[62816]: DEBUG oslo_vmware.api [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788550, 'name': PowerOnVM_Task, 'duration_secs': 0.446129} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.531250] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1605.531445] env[62816]: INFO nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 8.34 seconds to spawn the instance on the hypervisor. [ 1605.531664] env[62816]: DEBUG nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1605.532626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ca4ff2-7117-485b-86f1-8a1589e5aafc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.677242] env[62816]: DEBUG nova.network.neutron [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Successfully updated port: 3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1605.721314] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "75165526-2744-40b3-b311-45d13cc48cf1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.721762] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "75165526-2744-40b3-b311-45d13cc48cf1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.879934] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.880271] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-19f7e5b3-765e-42bf-b946-8a33fc9f35eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.888291] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1605.888291] env[62816]: value = "task-1788551" [ 1605.888291] env[62816]: _type = "Task" [ 1605.888291] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.897228] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788551, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.953893] env[62816]: DEBUG nova.compute.utils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1605.955223] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1605.955463] env[62816]: DEBUG nova.network.neutron [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1605.995672] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.021649] env[62816]: DEBUG nova.policy [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5b9d93a5a594b798454522d4d1c477d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a24ead98c9db4f3f90780f134e14cc1f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1606.057954] env[62816]: INFO nova.compute.manager [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 43.74 seconds to 
build instance. [ 1606.181734] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "refresh_cache-b788e586-850b-46e7-a204-d80eac56cce7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.181892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquired lock "refresh_cache-b788e586-850b-46e7-a204-d80eac56cce7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.182109] env[62816]: DEBUG nova.network.neutron [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1606.224782] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1606.337254] env[62816]: DEBUG nova.network.neutron [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Successfully created port: 74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1606.398694] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788551, 'name': PowerOffVM_Task, 'duration_secs': 0.189611} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.398986] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1606.399224] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1606.399997] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfd8951-e6c5-4ac8-92ff-fd41219ce600 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.407054] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1606.407285] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b85a35b4-fdfa-4c29-8f2c-0ff8edcff5fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.458749] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1606.560665] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b379784b-959a-4de7-b955-6a327bb9c5d6 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.285s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.579620] env[62816]: DEBUG nova.compute.manager [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Received event network-vif-plugged-3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1606.579620] env[62816]: DEBUG oslo_concurrency.lockutils [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] Acquiring lock "b788e586-850b-46e7-a204-d80eac56cce7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.579620] env[62816]: DEBUG oslo_concurrency.lockutils [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] Lock "b788e586-850b-46e7-a204-d80eac56cce7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.579620] env[62816]: DEBUG oslo_concurrency.lockutils [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] Lock "b788e586-850b-46e7-a204-d80eac56cce7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.579620] env[62816]: DEBUG nova.compute.manager [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] No waiting events found dispatching network-vif-plugged-3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1606.579620] env[62816]: WARNING nova.compute.manager [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Received unexpected event network-vif-plugged-3e105313-389a-4820-a69d-f8e130383f4f for instance with vm_state building and task_state spawning. [ 1606.579620] env[62816]: DEBUG nova.compute.manager [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Received event network-changed-3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1606.579620] env[62816]: DEBUG nova.compute.manager [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Refreshing instance network info cache due to event network-changed-3e105313-389a-4820-a69d-f8e130383f4f. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1606.579620] env[62816]: DEBUG oslo_concurrency.lockutils [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] Acquiring lock "refresh_cache-b788e586-850b-46e7-a204-d80eac56cce7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.684754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1606.684754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1606.684754] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleting the datastore file [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1606.684954] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66202de8-9636-4e74-b2fb-da5795393a64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.695712] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1606.695712] env[62816]: value = "task-1788553" [ 1606.695712] env[62816]: _type = "Task" [ 1606.695712] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.703823] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.747325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.751783] env[62816]: DEBUG nova.network.neutron [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1606.904541] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d63dd76-2fcf-4b86-8e24-3a6b9af38ec0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.912531] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cc5587-d2f8-4789-8202-15272b0019b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.959442] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e94fe3-5529-486b-a781-4adefe55dfae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.971366] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d10495-2644-4f56-b6a8-5ffe783d8fd1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.996582] env[62816]: DEBUG nova.compute.provider_tree [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1607.029630] env[62816]: DEBUG nova.network.neutron [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Updating instance_info_cache with network_info: [{"id": "3e105313-389a-4820-a69d-f8e130383f4f", "address": "fa:16:3e:91:96:3d", "network": {"id": "52bd41a2-b9e5-4f6e-8fa1-f1002161c6f7", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1468321507-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47262466d7ba483aafb4f819349f0be3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e105313-38", "ovs_interfaceid": "3e105313-389a-4820-a69d-f8e130383f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.207498] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201708} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.207696] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1607.207857] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1607.208138] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1607.484441] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1607.490727] env[62816]: DEBUG nova.compute.manager [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Received event network-changed-c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1607.490924] env[62816]: DEBUG nova.compute.manager [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Refreshing instance network info cache due to event network-changed-c924d6c0-d5cc-40a9-b561-9393a5f71201. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1607.491155] env[62816]: DEBUG oslo_concurrency.lockutils [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.491298] env[62816]: DEBUG oslo_concurrency.lockutils [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.491457] env[62816]: DEBUG nova.network.neutron [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Refreshing network info cache for port c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1607.500802] env[62816]: DEBUG nova.scheduler.client.report [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1607.512160] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1607.512405] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1607.512563] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Image limits 0:0:0 
{{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1607.512752] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1607.512901] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1607.513059] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1607.513273] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1607.513544] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1607.513737] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1607.513910] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1607.514098] env[62816]: DEBUG nova.virt.hardware [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1607.515156] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f521aad-c41f-462c-8aa4-236d3278b712 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.523312] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5477521c-3f31-45c3-952c-6f36156db76a {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.536904] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Releasing lock "refresh_cache-b788e586-850b-46e7-a204-d80eac56cce7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.537217] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Instance network_info: |[{"id": "3e105313-389a-4820-a69d-f8e130383f4f", "address": "fa:16:3e:91:96:3d", "network": {"id": "52bd41a2-b9e5-4f6e-8fa1-f1002161c6f7", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1468321507-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47262466d7ba483aafb4f819349f0be3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e105313-38", "ovs_interfaceid": "3e105313-389a-4820-a69d-f8e130383f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1607.537894] env[62816]: DEBUG oslo_concurrency.lockutils [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] Acquired lock "refresh_cache-b788e586-850b-46e7-a204-d80eac56cce7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.538086] env[62816]: DEBUG nova.network.neutron [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Refreshing network info cache for port 3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1607.539566] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:96:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e105313-389a-4820-a69d-f8e130383f4f', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1607.546341] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 
tempest-ServerPasswordTestJSON-2012136199-project-member] Creating folder: Project (47262466d7ba483aafb4f819349f0be3). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1607.547192] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d154c510-d35e-4876-ae9b-ec2ef1658780 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.557083] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Created folder: Project (47262466d7ba483aafb4f819349f0be3) in parent group-v370905. [ 1607.557266] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Creating folder: Instances. Parent ref: group-v371072. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1607.557473] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1d08da7-dca1-4818-8246-212088724223 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.566630] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Created folder: Instances in parent group-v371072. [ 1607.566853] env[62816]: DEBUG oslo.service.loopingcall [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1607.567043] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1607.567234] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a4a721fc-36b0-4c42-b5ed-2c23be1ed671 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.586088] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1607.586088] env[62816]: value = "task-1788556" [ 1607.586088] env[62816]: _type = "Task" [ 1607.586088] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.593270] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788556, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.008704] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.008704] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1608.010809] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.084s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.011088] env[62816]: DEBUG nova.objects.instance [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lazy-loading 'resources' on Instance uuid 3c4cca03-b2ee-48a2-9a15-a21124bd6599 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1608.098316] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788556, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1608.251234] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1608.251857] env[62816]: DEBUG nova.virt.hardware [None 
req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1608.252053] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1608.252387] env[62816]: DEBUG nova.virt.hardware [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1608.253661] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e620d5-0eba-490b-8239-56812c39a866 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.267028] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f10f770-0054-491a-8314-9a3eec9280a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.284371] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:cb:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2b6a990-a634-4e68-ba4c-886b856209a5', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1608.293174] env[62816]: DEBUG oslo.service.loopingcall [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.293515] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1608.293779] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-af8892b9-3c00-41d2-9c6b-a029a815e07f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.316900] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1608.316900] env[62816]: value = "task-1788557" [ 1608.316900] env[62816]: _type = "Task" [ 1608.316900] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.332589] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.370939] env[62816]: DEBUG nova.network.neutron [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Updated VIF entry in instance network info cache for port 3e105313-389a-4820-a69d-f8e130383f4f. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.371134] env[62816]: DEBUG nova.network.neutron [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Updating instance_info_cache with network_info: [{"id": "3e105313-389a-4820-a69d-f8e130383f4f", "address": "fa:16:3e:91:96:3d", "network": {"id": "52bd41a2-b9e5-4f6e-8fa1-f1002161c6f7", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1468321507-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47262466d7ba483aafb4f819349f0be3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e105313-38", "ovs_interfaceid": "3e105313-389a-4820-a69d-f8e130383f4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.401421] env[62816]: DEBUG nova.network.neutron [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updated VIF entry in instance network info cache for port c924d6c0-d5cc-40a9-b561-9393a5f71201. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.401939] env[62816]: DEBUG nova.network.neutron [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.514863] env[62816]: DEBUG nova.compute.utils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1608.516716] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1608.516912] env[62816]: DEBUG nova.network.neutron [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1608.528835] env[62816]: DEBUG nova.network.neutron [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Successfully updated port: 74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1608.568206] env[62816]: DEBUG nova.policy [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c9fdfaf9360f4dbb959bf3e8bcbee731', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8179e67e019493a894cd7c67825743c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1608.598011] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788556, 'name': CreateVM_Task, 'duration_secs': 0.611522} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.600745] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1608.602964] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.603177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.603570] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1608.604741] env[62816]: DEBUG nova.compute.manager [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Received event network-vif-plugged-74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1608.604934] env[62816]: DEBUG oslo_concurrency.lockutils [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] Acquiring lock "ede88298-0eae-4471-b602-c26b5fa7a72a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.606167] env[62816]: DEBUG oslo_concurrency.lockutils [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.606376] env[62816]: DEBUG oslo_concurrency.lockutils [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.606573] env[62816]: DEBUG nova.compute.manager [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] No waiting events found dispatching network-vif-plugged-74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1608.606746] env[62816]: WARNING nova.compute.manager 
[req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Received unexpected event network-vif-plugged-74b0ab45-5abc-4f73-9e97-70674d2c1841 for instance with vm_state building and task_state spawning. [ 1608.606925] env[62816]: DEBUG nova.compute.manager [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Received event network-changed-74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1608.607076] env[62816]: DEBUG nova.compute.manager [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Refreshing instance network info cache due to event network-changed-74b0ab45-5abc-4f73-9e97-70674d2c1841. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1608.607269] env[62816]: DEBUG oslo_concurrency.lockutils [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] Acquiring lock "refresh_cache-ede88298-0eae-4471-b602-c26b5fa7a72a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.607408] env[62816]: DEBUG oslo_concurrency.lockutils [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] Acquired lock "refresh_cache-ede88298-0eae-4471-b602-c26b5fa7a72a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.607565] env[62816]: DEBUG nova.network.neutron [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Refreshing network info cache for port 74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.608896] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3590feac-801e-4155-b536-79f782f94a6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.618783] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1608.618783] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528326ba-ada9-5c0d-2b4b-61e3d9b05e37" [ 1608.618783] env[62816]: _type = "Task" [ 1608.618783] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.628326] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528326ba-ada9-5c0d-2b4b-61e3d9b05e37, 'name': SearchDatastore_Task, 'duration_secs': 0.008674} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.630885] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.631155] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1608.631391] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.631544] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.631722] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1608.632160] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00719a5e-027a-412c-8b29-509b05825271 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.641500] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1608.641692] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1608.645070] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-667cd510-36ad-4894-8b93-06835ca12658 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.651270] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1608.651270] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52143549-d42a-3847-bd5e-c156a2688a5d" [ 1608.651270] env[62816]: _type = "Task" [ 1608.651270] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.659416] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52143549-d42a-3847-bd5e-c156a2688a5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.829665] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788557, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.844389] env[62816]: DEBUG nova.network.neutron [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Successfully created port: 75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1608.877022] env[62816]: DEBUG oslo_concurrency.lockutils [req-ed9ee379-dd06-40b8-b75f-49cf08aa75d5 req-7d43c31a-034b-4129-a15d-33b3acdd9d38 service nova] Releasing lock "refresh_cache-b788e586-850b-46e7-a204-d80eac56cce7" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.907929] env[62816]: DEBUG oslo_concurrency.lockutils [req-a9dec56a-fdc0-4f4d-8cee-fe00fe3d4386 req-f0159c07-371b-4e2d-a3c1-f77f52d256e8 service nova] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.963377] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0cb96b7-463f-40dd-9cb3-55c5391022a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.971512] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5489e1-e867-4272-9d3a-27da7b855e74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.005094] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761632a9-0970-43f0-9e71-885f2ed6445b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.012503] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3167fd6a-2907-47e6-9555-156993ace85a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.027439] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1609.031229] env[62816]: DEBUG nova.compute.provider_tree [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1609.035108] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "refresh_cache-ede88298-0eae-4471-b602-c26b5fa7a72a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.158702] env[62816]: DEBUG nova.network.neutron [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1609.164504] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52143549-d42a-3847-bd5e-c156a2688a5d, 'name': SearchDatastore_Task, 'duration_secs': 0.008445} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.165247] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baab9d78-52d9-4e01-86f3-025dc050dd80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.170861] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1609.170861] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5249b8ba-5922-7416-b07b-084e4a06fb0b" [ 1609.170861] env[62816]: _type = "Task" [ 1609.170861] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.178905] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5249b8ba-5922-7416-b07b-084e4a06fb0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.263645] env[62816]: DEBUG nova.network.neutron [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.327960] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788557, 'name': CreateVM_Task, 'duration_secs': 0.585739} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.328157] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1609.328830] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.329026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.329359] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1609.329620] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70197e45-6403-45e1-b46a-d5081310e5a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.334580] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1609.334580] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523124ec-b8aa-8a63-c3d6-b6195ed12170" [ 1609.334580] env[62816]: _type = "Task" [ 1609.334580] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.342086] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523124ec-b8aa-8a63-c3d6-b6195ed12170, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.538919] env[62816]: DEBUG nova.scheduler.client.report [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1609.682292] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5249b8ba-5922-7416-b07b-084e4a06fb0b, 'name': SearchDatastore_Task, 'duration_secs': 0.013174} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.682551] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.682820] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b788e586-850b-46e7-a204-d80eac56cce7/b788e586-850b-46e7-a204-d80eac56cce7.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1609.683094] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b27815c9-7c52-43b2-af95-e1b813b82d9d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.689115] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1609.689115] env[62816]: value = "task-1788558" [ 1609.689115] env[62816]: _type = "Task" [ 1609.689115] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.697327] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788558, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.767234] env[62816]: DEBUG oslo_concurrency.lockutils [req-8be0bf18-22de-4cf2-a666-b81b1dddb519 req-9562e4a6-ab3b-40ba-9f34-f0a6b6009450 service nova] Releasing lock "refresh_cache-ede88298-0eae-4471-b602-c26b5fa7a72a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.767647] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquired lock "refresh_cache-ede88298-0eae-4471-b602-c26b5fa7a72a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.767829] env[62816]: DEBUG nova.network.neutron [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1609.845243] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523124ec-b8aa-8a63-c3d6-b6195ed12170, 'name': SearchDatastore_Task, 'duration_secs': 0.051677} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.845626] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.845874] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.846124] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.846276] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.846461] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 
tempest-ServersAdminTestJSON-1690259284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.846728] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-281545d3-becd-405a-ae94-33ea0611b8c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.869264] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.869464] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.870207] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24bc1070-4cae-4a31-b8af-64d6593647bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.875674] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1609.875674] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a83806-7639-c6c0-6fae-00cbde0eae9f" [ 1609.875674] env[62816]: _type = "Task" [ 1609.875674] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.883776] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a83806-7639-c6c0-6fae-00cbde0eae9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.041985] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1610.044687] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.034s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.047146] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.572s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.049706] env[62816]: INFO nova.compute.claims [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1610.072911] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.074308] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.074308] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.074308] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.074308] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] 
Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.074308] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.074308] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.074585] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.074585] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.074711] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.074909] env[62816]: DEBUG nova.virt.hardware [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.075874] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99994f54-c200-41e2-bc04-f5c8780dcfa3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.079706] env[62816]: INFO nova.scheduler.client.report [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Deleted allocations for instance 3c4cca03-b2ee-48a2-9a15-a21124bd6599 [ 1610.088038] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f3947d-8a7e-4640-bb1a-58f0b34df863 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.200904] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788558, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.333610] env[62816]: DEBUG nova.network.neutron [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1610.389522] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a83806-7639-c6c0-6fae-00cbde0eae9f, 'name': SearchDatastore_Task, 'duration_secs': 0.023057} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.389522] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfbd63bb-8950-400a-a543-9587fa78f9e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.395803] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1610.395803] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5281dfc4-f295-4385-14c8-ed3efc404fdc" [ 1610.395803] env[62816]: _type = "Task" [ 1610.395803] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.408360] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281dfc4-f295-4385-14c8-ed3efc404fdc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.589669] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ae674bb-2e08-43f1-9279-9543f87677b2 tempest-ServerShowV254Test-1675790595 tempest-ServerShowV254Test-1675790595-project-member] Lock "3c4cca03-b2ee-48a2-9a15-a21124bd6599" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.398s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.627700] env[62816]: DEBUG nova.network.neutron [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Updating instance_info_cache with network_info: [{"id": "74b0ab45-5abc-4f73-9e97-70674d2c1841", "address": "fa:16:3e:13:4d:61", "network": {"id": "93c67f55-60d8-4819-aa45-3de55c1dfd63", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1357509182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a24ead98c9db4f3f90780f134e14cc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74b0ab45-5a", "ovs_interfaceid": "74b0ab45-5abc-4f73-9e97-70674d2c1841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.720271] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788558, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.748501} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.720271] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b788e586-850b-46e7-a204-d80eac56cce7/b788e586-850b-46e7-a204-d80eac56cce7.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.720271] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1610.720271] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf132a1b-53f3-4a8b-8985-dc51f8833168 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.720271] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1610.720271] env[62816]: value = "task-1788559" [ 1610.720271] env[62816]: _type = "Task" [ 1610.720271] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.729716] env[62816]: DEBUG nova.compute.manager [req-b0d42c34-b483-438d-8313-975f1ca5a398 req-5880e3d6-443d-47bd-ad7f-165bb6ecf015 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Received event network-vif-plugged-75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1610.729716] env[62816]: DEBUG oslo_concurrency.lockutils [req-b0d42c34-b483-438d-8313-975f1ca5a398 req-5880e3d6-443d-47bd-ad7f-165bb6ecf015 service nova] Acquiring lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1610.729716] env[62816]: DEBUG oslo_concurrency.lockutils [req-b0d42c34-b483-438d-8313-975f1ca5a398 req-5880e3d6-443d-47bd-ad7f-165bb6ecf015 service nova] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1610.729716] env[62816]: DEBUG oslo_concurrency.lockutils [req-b0d42c34-b483-438d-8313-975f1ca5a398 req-5880e3d6-443d-47bd-ad7f-165bb6ecf015 service nova] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1610.729716] env[62816]: DEBUG nova.compute.manager [req-b0d42c34-b483-438d-8313-975f1ca5a398 req-5880e3d6-443d-47bd-ad7f-165bb6ecf015 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] No waiting events found 
dispatching network-vif-plugged-75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1610.729716] env[62816]: WARNING nova.compute.manager [req-b0d42c34-b483-438d-8313-975f1ca5a398 req-5880e3d6-443d-47bd-ad7f-165bb6ecf015 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Received unexpected event network-vif-plugged-75743f5b-f0a7-4280-97cb-0d12ccb870c6 for instance with vm_state building and task_state spawning. [ 1610.735133] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788559, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.779290] env[62816]: DEBUG nova.network.neutron [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Successfully updated port: 75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1610.906747] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281dfc4-f295-4385-14c8-ed3efc404fdc, 'name': SearchDatastore_Task, 'duration_secs': 0.029759} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.907068] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.907774] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.907774] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-601e9ca6-d712-497b-85a4-7960db7c562c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.914438] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1610.914438] env[62816]: value = "task-1788560" [ 1610.914438] env[62816]: _type = "Task" [ 1610.914438] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.923210] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788560, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.129720] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Releasing lock "refresh_cache-ede88298-0eae-4471-b602-c26b5fa7a72a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.130128] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Instance network_info: |[{"id": "74b0ab45-5abc-4f73-9e97-70674d2c1841", "address": "fa:16:3e:13:4d:61", "network": {"id": "93c67f55-60d8-4819-aa45-3de55c1dfd63", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1357509182-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a24ead98c9db4f3f90780f134e14cc1f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f678cd81-6d15-43d5-aab7-d7eedc2ef2d5", "external-id": "nsx-vlan-transportzone-602", "segmentation_id": 602, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74b0ab45-5a", "ovs_interfaceid": "74b0ab45-5abc-4f73-9e97-70674d2c1841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1611.130553] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:13:4d:61', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f678cd81-6d15-43d5-aab7-d7eedc2ef2d5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74b0ab45-5abc-4f73-9e97-70674d2c1841', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1611.138263] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Creating folder: Project (a24ead98c9db4f3f90780f134e14cc1f). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1611.141273] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-756fbfbe-c4d3-4956-aa0a-edbf2ec95521 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.152218] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Created folder: Project (a24ead98c9db4f3f90780f134e14cc1f) in parent group-v370905. [ 1611.152430] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Creating folder: Instances. Parent ref: group-v371076. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1611.152724] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbee4a74-d2fd-4c9a-b1f6-13c302c5fd07 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.162039] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Created folder: Instances in parent group-v371076. [ 1611.162439] env[62816]: DEBUG oslo.service.loopingcall [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1611.162556] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1611.162808] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-46f57b6f-6f9e-4bda-a709-46dca4e50a4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.186944] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1611.186944] env[62816]: value = "task-1788563" [ 1611.186944] env[62816]: _type = "Task" [ 1611.186944] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.198857] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788563, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.231611] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788559, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07123} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.235023] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1611.236213] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77cbdd31-357a-4d40-914a-ef0d422b2376 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.260899] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b788e586-850b-46e7-a204-d80eac56cce7/b788e586-850b-46e7-a204-d80eac56cce7.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1611.264310] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7372ffd1-f7cb-45a5-867d-c2712d95acfd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.283892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "refresh_cache-a60d4ff0-af76-4489-840b-ff7f6c23b2ab" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.283892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "refresh_cache-a60d4ff0-af76-4489-840b-ff7f6c23b2ab" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.284034] env[62816]: DEBUG nova.network.neutron [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1611.287064] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1611.287064] env[62816]: value = "task-1788564" [ 1611.287064] env[62816]: _type = "Task" [ 1611.287064] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.300257] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788564, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.430469] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788560, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.531515] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb462e63-a707-4a48-97c4-36dc0f344a8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.539081] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27444309-29a2-49b7-8a9d-bbeee51b21ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.573045] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf88d0bd-c312-49c4-a7ee-3d008bf07105 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.580817] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a9922b-4ddd-44bd-9835-8ef9d36ebbc4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.594921] env[62816]: DEBUG nova.compute.provider_tree [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1611.698977] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788563, 'name': CreateVM_Task, 'duration_secs': 0.448758} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.699193] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1611.699928] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.700194] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.700556] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1611.700865] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6788fe9-e668-4be3-a4b0-059a7b94189a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.705475] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1611.705475] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529d94f9-25d5-90c6-2ad4-990cd524d20a" [ 1611.705475] env[62816]: _type = "Task" [ 1611.705475] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.713035] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529d94f9-25d5-90c6-2ad4-990cd524d20a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.799427] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788564, 'name': ReconfigVM_Task, 'duration_secs': 0.418856} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.799714] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b788e586-850b-46e7-a204-d80eac56cce7/b788e586-850b-46e7-a204-d80eac56cce7.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.800348] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0a8564f-4fb8-40eb-bfaa-1339eb040e23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.807612] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1611.807612] env[62816]: value = "task-1788565" [ 1611.807612] env[62816]: _type = "Task" [ 1611.807612] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.815643] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788565, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.823663] env[62816]: DEBUG nova.network.neutron [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1611.929442] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788560, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536451} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.929442] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.929442] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.929442] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5d0ac8d6-4d21-4a4a-af08-932437738456 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.940905] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1611.940905] env[62816]: value = "task-1788566" [ 1611.940905] env[62816]: _type = "Task" [ 1611.940905] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.955127] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788566, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.006326] env[62816]: DEBUG nova.network.neutron [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Updating instance_info_cache with network_info: [{"id": "75743f5b-f0a7-4280-97cb-0d12ccb870c6", "address": "fa:16:3e:6f:62:7b", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75743f5b-f0", "ovs_interfaceid": "75743f5b-f0a7-4280-97cb-0d12ccb870c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.098898] env[62816]: DEBUG nova.scheduler.client.report [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1612.215963] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529d94f9-25d5-90c6-2ad4-990cd524d20a, 'name': SearchDatastore_Task, 'duration_secs': 0.052176} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.216299] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.216575] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1612.216823] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.217074] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.217283] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1612.217547] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ceeebb03-6ac7-4737-bd5c-264e0586ddf8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.228989] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1612.229204] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1612.229911] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e55a415c-bfa8-4223-b07b-0b93b9063cbf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.235407] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1612.235407] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ab0a98-332d-20b1-6ed9-af996f1b5a50" [ 1612.235407] env[62816]: _type = "Task" [ 1612.235407] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.243822] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ab0a98-332d-20b1-6ed9-af996f1b5a50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.317678] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788565, 'name': Rename_Task, 'duration_secs': 0.134474} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.318129] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1612.318407] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b81b3dc-0cb6-4b85-92f3-fc8bdcd27346 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.324876] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1612.324876] env[62816]: value = "task-1788567" [ 1612.324876] env[62816]: _type = "Task" [ 1612.324876] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.332182] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788567, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.450979] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788566, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06859} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.451320] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1612.452800] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a391b01-8c26-40c3-971b-eb3fbbbb24b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.474191] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1612.474502] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b917328-08d5-4abf-b8d6-18d37ebcb3d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.494183] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1612.494183] env[62816]: value = "task-1788568" [ 1612.494183] env[62816]: _type = "Task" [ 1612.494183] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.502352] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788568, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.508980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "refresh_cache-a60d4ff0-af76-4489-840b-ff7f6c23b2ab" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.509330] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Instance network_info: |[{"id": "75743f5b-f0a7-4280-97cb-0d12ccb870c6", "address": "fa:16:3e:6f:62:7b", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75743f5b-f0", "ovs_interfaceid": "75743f5b-f0a7-4280-97cb-0d12ccb870c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1612.509742] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:62:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b5c34919-7d52-4a52-bab1-81af4c8182ef', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75743f5b-f0a7-4280-97cb-0d12ccb870c6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1612.518265] env[62816]: DEBUG oslo.service.loopingcall [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1612.518486] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1612.518714] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64dfa88c-47e5-4a8e-84d7-56a9c044649d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.538065] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1612.538065] env[62816]: value = "task-1788569" [ 1612.538065] env[62816]: _type = "Task" [ 1612.538065] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.546541] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788569, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.603678] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.556s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.604264] env[62816]: DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1612.607069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.281s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1612.607361] env[62816]: DEBUG nova.objects.instance [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lazy-loading 'resources' on Instance uuid ee543138-1c43-46c4-a512-1977fa5eb3c6 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1612.745289] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ab0a98-332d-20b1-6ed9-af996f1b5a50, 'name': SearchDatastore_Task, 'duration_secs': 0.083373} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.746155] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d57f544b-23e5-46a4-a8f3-c89bc065371c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.752526] env[62816]: DEBUG nova.compute.manager [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Received event network-changed-75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1612.752526] env[62816]: DEBUG nova.compute.manager [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Refreshing instance network info cache due to event network-changed-75743f5b-f0a7-4280-97cb-0d12ccb870c6. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1612.752526] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] Acquiring lock "refresh_cache-a60d4ff0-af76-4489-840b-ff7f6c23b2ab" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.752526] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] Acquired lock "refresh_cache-a60d4ff0-af76-4489-840b-ff7f6c23b2ab" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.753034] env[62816]: DEBUG nova.network.neutron [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Refreshing network info cache for port 75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1612.755198] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1612.755198] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5216d8d3-cdc6-52f5-4414-2c781cae2e6f" [ 1612.755198] env[62816]: _type = "Task" [ 1612.755198] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.766019] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5216d8d3-cdc6-52f5-4414-2c781cae2e6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.834515] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788567, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.004415] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788568, 'name': ReconfigVM_Task, 'duration_secs': 0.286675} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.004693] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1613.005431] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-77d3a86b-36b3-489b-98b6-60c8f81cae79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.011915] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1613.011915] env[62816]: value = "task-1788570" [ 1613.011915] env[62816]: _type = "Task" [ 1613.011915] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.020085] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788570, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.048396] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788569, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.110066] env[62816]: DEBUG nova.compute.utils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1613.114603] env[62816]: DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1613.114757] env[62816]: DEBUG nova.network.neutron [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1613.168078] env[62816]: DEBUG nova.policy [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1613.271620] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5216d8d3-cdc6-52f5-4414-2c781cae2e6f, 'name': SearchDatastore_Task, 'duration_secs': 0.047064} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.271970] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.272197] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ede88298-0eae-4471-b602-c26b5fa7a72a/ede88298-0eae-4471-b602-c26b5fa7a72a.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1613.272462] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-265aa65b-fc7d-4e2f-beab-a628ac5ee7fe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.282143] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1613.282143] env[62816]: value = "task-1788571" [ 1613.282143] env[62816]: _type = "Task" [ 1613.282143] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.290728] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788571, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.334780] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788567, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.528455] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788570, 'name': Rename_Task, 'duration_secs': 0.182039} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.528791] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1613.529082] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa99a382-700a-468d-809a-1a81d8243560 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.535842] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1613.535842] env[62816]: value = "task-1788572" [ 1613.535842] env[62816]: _type = "Task" [ 1613.535842] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.550979] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788572, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.557605] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788569, 'name': CreateVM_Task, 'duration_secs': 0.820822} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.558388] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1613.559384] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.559626] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.560023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1613.560286] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80b67fba-0f10-4e5a-8804-6106a1c32a4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.566226] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1613.566226] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f5a4b6-25bc-48f8-5522-4275039b9901" [ 1613.566226] env[62816]: _type = "Task" [ 1613.566226] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.576919] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f5a4b6-25bc-48f8-5522-4275039b9901, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.593522] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0805773f-c6fc-4540-992e-3818e939bd67 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.602719] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe2a48b-9f6d-47cd-abf1-8527c76b5f06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.637974] env[62816]: DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1613.644031] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c470cd-5488-44e3-ae0d-e5f5adbd78dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.654053] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3538fae-1095-4bc6-8b52-36e71fefe419 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.675809] env[62816]: DEBUG nova.compute.provider_tree [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1613.724137] env[62816]: DEBUG nova.network.neutron [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Updated VIF entry in instance network info cache for port 75743f5b-f0a7-4280-97cb-0d12ccb870c6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1613.724137] env[62816]: DEBUG nova.network.neutron [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Updating instance_info_cache with network_info: [{"id": "75743f5b-f0a7-4280-97cb-0d12ccb870c6", "address": "fa:16:3e:6f:62:7b", "network": {"id": "0e2bd323-84cf-4070-99b2-d706174d6d9c", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1675171280-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8179e67e019493a894cd7c67825743c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b5c34919-7d52-4a52-bab1-81af4c8182ef", "external-id": "nsx-vlan-transportzone-458", "segmentation_id": 458, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75743f5b-f0", "ovs_interfaceid": "75743f5b-f0a7-4280-97cb-0d12ccb870c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.738793] env[62816]: DEBUG nova.network.neutron [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Successfully created port: bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1613.795699] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788571, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484388} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.796057] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ede88298-0eae-4471-b602-c26b5fa7a72a/ede88298-0eae-4471-b602-c26b5fa7a72a.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1613.796353] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1613.796708] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35d305ef-5773-4ed1-9e73-e7dc84506726 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.804956] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1613.804956] env[62816]: value = "task-1788573" [ 1613.804956] env[62816]: _type = "Task" [ 1613.804956] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.813998] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788573, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.835217] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788567, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.048985] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788572, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.079036] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f5a4b6-25bc-48f8-5522-4275039b9901, 'name': SearchDatastore_Task, 'duration_secs': 0.050281} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.079036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.079036] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.079036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.079036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.079036] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1614.079036] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2adc9d14-d959-4c6b-883e-1965959b284a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.094625] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.095169] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1614.097025] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fc7b1b6-6671-47b1-abee-26bfc0a0fb2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.104018] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1614.104018] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5274853a-626e-6ffd-06e6-8741080cc4b1" [ 1614.104018] env[62816]: _type = "Task" [ 1614.104018] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.113835] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5274853a-626e-6ffd-06e6-8741080cc4b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.180125] env[62816]: DEBUG nova.scheduler.client.report [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1614.226952] env[62816]: DEBUG oslo_concurrency.lockutils [req-5d91d2d5-cb74-4c35-b245-8f4ec9c5c65c req-61bb4f75-d561-457b-9929-10c1e6ea1234 service nova] Releasing lock "refresh_cache-a60d4ff0-af76-4489-840b-ff7f6c23b2ab" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.315687] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788573, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07868} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.316103] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1614.316746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d935e4-56bd-4f4c-8d89-c065fb8f1f31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.339477] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] ede88298-0eae-4471-b602-c26b5fa7a72a/ede88298-0eae-4471-b602-c26b5fa7a72a.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1614.342657] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7d29ea0-4977-4c6f-8d43-d562a5d91876 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.362258] env[62816]: DEBUG oslo_vmware.api [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788567, 'name': PowerOnVM_Task, 'duration_secs': 1.519059} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.363574] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1614.363789] env[62816]: INFO nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Took 9.55 seconds to spawn the instance on the hypervisor. [ 1614.363971] env[62816]: DEBUG nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1614.364321] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1614.364321] env[62816]: value = "task-1788574" [ 1614.364321] env[62816]: _type = "Task" [ 1614.364321] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.365015] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcb74568-5a74-4924-9d66-5c8e6f6b9e45 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.379425] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788574, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.549246] env[62816]: DEBUG oslo_vmware.api [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788572, 'name': PowerOnVM_Task, 'duration_secs': 0.76254} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.549516] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1614.549727] env[62816]: DEBUG nova.compute.manager [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1614.550539] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6b9133-fd38-4685-a2ff-862d556b57cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.614126] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5274853a-626e-6ffd-06e6-8741080cc4b1, 'name': SearchDatastore_Task, 'duration_secs': 0.033928} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.615008] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6a49a53-fe9b-4d84-a5f4-5c56d4a02cf0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.621287] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1614.621287] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52dd8bd7-f018-51a5-9ecb-8dd25e7acab8" [ 1614.621287] env[62816]: _type = "Task" [ 1614.621287] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.633178] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd8bd7-f018-51a5-9ecb-8dd25e7acab8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.651299] env[62816]: DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1614.680764] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1614.680764] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1614.680764] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1614.680764] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1614.680764] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1614.681126] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1614.681126] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1614.681225] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1614.681340] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1614.681498] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1614.681674] env[62816]: DEBUG nova.virt.hardware [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1614.682608] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796edbd5-88d6-4c15-b636-fafeecd1960e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.690048] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.083s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.692211] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 26.888s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.695467] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ee5894-5b7f-4101-b971-73868399946c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.712139] env[62816]: INFO nova.scheduler.client.report [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Deleted allocations for instance ee543138-1c43-46c4-a512-1977fa5eb3c6 [ 1614.878710] 
env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788574, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.888023] env[62816]: INFO nova.compute.manager [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Took 42.60 seconds to build instance. [ 1615.067848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.131618] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd8bd7-f018-51a5-9ecb-8dd25e7acab8, 'name': SearchDatastore_Task, 'duration_secs': 0.040435} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.132308] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.132585] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a60d4ff0-af76-4489-840b-ff7f6c23b2ab/a60d4ff0-af76-4489-840b-ff7f6c23b2ab.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1615.132856] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eeaf22a3-25d0-4ef5-b35a-e609a0b80a0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.140336] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1615.140336] env[62816]: value = "task-1788575" [ 1615.140336] env[62816]: _type = "Task" [ 1615.140336] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.147808] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788575, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.200838] env[62816]: DEBUG nova.objects.instance [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lazy-loading 'migration_context' on Instance uuid 9bda24c6-f950-47ff-ad3c-ff745291870c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1615.220288] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b774a9ee-8880-4112-a501-67d3e7487e0d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "ee543138-1c43-46c4-a512-1977fa5eb3c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.109s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.374865] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "e003e41d-93e8-4258-b8ca-3c2420b73df0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.375267] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.375491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "e003e41d-93e8-4258-b8ca-3c2420b73df0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.375818] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.375950] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.378712] env[62816]: INFO nova.compute.manager [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Terminating instance [ 1615.384197] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788574, 'name': ReconfigVM_Task, 'duration_secs': 1.01917} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.384916] env[62816]: DEBUG nova.compute.manager [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1615.385139] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1615.385484] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Reconfigured VM instance instance-0000003c to attach disk [datastore1] ede88298-0eae-4471-b602-c26b5fa7a72a/ede88298-0eae-4471-b602-c26b5fa7a72a.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1615.387650] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d7b1d8-1b6d-47a4-941a-33e24d0797f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.389881] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b5a30a8-e015-4ade-ba4b-fe991ddeb9a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.391726] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b963795-10af-46c5-b519-79a1b2ac2585 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "b788e586-850b-46e7-a204-d80eac56cce7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.134s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.397527] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1615.399201] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-053238a6-c6d2-4bb5-bc83-bbdacda4eb22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.401461] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1615.401461] env[62816]: value = "task-1788576" [ 1615.401461] env[62816]: _type = "Task" [ 1615.401461] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.407664] env[62816]: DEBUG oslo_vmware.api [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1615.407664] env[62816]: value = "task-1788577" [ 1615.407664] env[62816]: _type = "Task" [ 1615.407664] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.415330] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788576, 'name': Rename_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.426032] env[62816]: DEBUG oslo_vmware.api [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.569428] env[62816]: DEBUG nova.compute.manager [req-b94d2072-10a3-423f-a247-96284fc17b91 req-fd325f8a-1862-4a05-8f5c-c978d31e46b0 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-vif-plugged-bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.569674] env[62816]: DEBUG oslo_concurrency.lockutils [req-b94d2072-10a3-423f-a247-96284fc17b91 req-fd325f8a-1862-4a05-8f5c-c978d31e46b0 service nova] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.569931] env[62816]: DEBUG oslo_concurrency.lockutils [req-b94d2072-10a3-423f-a247-96284fc17b91 req-fd325f8a-1862-4a05-8f5c-c978d31e46b0 service nova] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.572533] env[62816]: DEBUG oslo_concurrency.lockutils [req-b94d2072-10a3-423f-a247-96284fc17b91 req-fd325f8a-1862-4a05-8f5c-c978d31e46b0 service nova] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.572827] env[62816]: DEBUG nova.compute.manager [req-b94d2072-10a3-423f-a247-96284fc17b91 
req-fd325f8a-1862-4a05-8f5c-c978d31e46b0 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] No waiting events found dispatching network-vif-plugged-bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1615.573079] env[62816]: WARNING nova.compute.manager [req-b94d2072-10a3-423f-a247-96284fc17b91 req-fd325f8a-1862-4a05-8f5c-c978d31e46b0 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received unexpected event network-vif-plugged-bed0373b-9c6a-4357-a640-8218a972cb72 for instance with vm_state building and task_state spawning. [ 1615.587920] env[62816]: DEBUG nova.network.neutron [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Successfully updated port: bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1615.650872] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788575, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.866023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "b788e586-850b-46e7-a204-d80eac56cce7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.866023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "b788e586-850b-46e7-a204-d80eac56cce7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.866023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "b788e586-850b-46e7-a204-d80eac56cce7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.866023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "b788e586-850b-46e7-a204-d80eac56cce7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.866023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "b788e586-850b-46e7-a204-d80eac56cce7-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.867389] env[62816]: INFO nova.compute.manager [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Terminating instance [ 1615.869169] env[62816]: DEBUG nova.compute.manager [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1615.869394] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1615.870262] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c55944-81d6-4e64-8fae-9fe91305d310 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.878276] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1615.878563] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-54c8d261-3fe2-4790-8fac-85e1161f0780 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.884451] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1615.884451] env[62816]: value = "task-1788578" [ 1615.884451] env[62816]: _type = "Task" [ 1615.884451] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.893847] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.909927] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788576, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.919719] env[62816]: DEBUG oslo_vmware.api [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788577, 'name': PowerOffVM_Task, 'duration_secs': 0.30851} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.919967] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1615.920156] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1615.920390] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33b35759-571f-4459-9640-e1984e74418d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.023443] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1616.023684] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1616.023872] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Deleting the datastore file [datastore1] e003e41d-93e8-4258-b8ca-3c2420b73df0 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.024154] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfcb983e-e92b-4936-a600-f46bd658d68a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.032341] env[62816]: DEBUG oslo_vmware.api [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for the task: (returnval){ [ 1616.032341] env[62816]: value = "task-1788580" [ 1616.032341] env[62816]: _type = "Task" [ 1616.032341] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.037790] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60eced1-0daa-4e9f-b5db-2cf8b62e70a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.043070] env[62816]: DEBUG oslo_vmware.api [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.048067] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bd2dea-39ac-45f0-bd44-3c301f76e7b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.079227] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d758d0-03f6-44e1-8dbf-afd29190f66a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.087655] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b96a8c-316d-4109-b78c-b5779b63a1c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.091762] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.091861] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.092807] env[62816]: DEBUG nova.network.neutron [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1616.106977] env[62816]: DEBUG nova.compute.provider_tree [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1616.153290] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788575, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579237} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.153528] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a60d4ff0-af76-4489-840b-ff7f6c23b2ab/a60d4ff0-af76-4489-840b-ff7f6c23b2ab.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1616.153836] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1616.154073] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3c88408-1f3c-4e23-9163-8f080191f97f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.163553] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1616.163553] env[62816]: value = "task-1788581" [ 1616.163553] env[62816]: _type = "Task" [ 1616.163553] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.170878] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.188945] env[62816]: INFO nova.compute.manager [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Rebuilding instance [ 1616.230761] env[62816]: DEBUG nova.compute.manager [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1616.231632] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e06b386-9baf-4f09-941d-8834d53c1977 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.394346] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788578, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.411509] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788576, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.542325] env[62816]: DEBUG oslo_vmware.api [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Task: {'id': task-1788580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196438} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.542631] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1616.542831] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1616.543016] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1616.543204] env[62816]: INFO nova.compute.manager [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1616.543447] env[62816]: DEBUG oslo.service.loopingcall [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.543668] env[62816]: DEBUG nova.compute.manager [-] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1616.543760] env[62816]: DEBUG nova.network.neutron [-] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1616.611326] env[62816]: DEBUG nova.scheduler.client.report [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1616.638675] env[62816]: DEBUG nova.network.neutron [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1616.675079] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072777} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.675615] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1616.676536] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5a83a9-a4bf-4eae-a43a-6b5e49b393be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.709143] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] a60d4ff0-af76-4489-840b-ff7f6c23b2ab/a60d4ff0-af76-4489-840b-ff7f6c23b2ab.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1616.712868] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56729db0-7049-493c-9294-b6acbe2dbc77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.734121] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1616.734121] env[62816]: value = "task-1788582" [ 1616.734121] env[62816]: _type = "Task" [ 1616.734121] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.745377] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1616.745885] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788582, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.746223] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-802113dd-9f73-4cf7-a773-2d0247fab36c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.751996] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1616.751996] env[62816]: value = "task-1788583" [ 1616.751996] env[62816]: _type = "Task" [ 1616.751996] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.759945] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788583, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.896409] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788578, 'name': PowerOffVM_Task, 'duration_secs': 0.996356} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.896859] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1616.897161] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1616.897532] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc1643f1-496a-4059-8b4b-f17226ea79ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.901024] env[62816]: DEBUG nova.network.neutron [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.911749] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 
tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788576, 'name': Rename_Task, 'duration_secs': 1.277028} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.913125] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1616.913125] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c253b71e-0357-431b-b8b1-203bec604d30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.918650] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1616.918650] env[62816]: value = "task-1788585" [ 1616.918650] env[62816]: _type = "Task" [ 1616.918650] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.927581] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788585, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.983501] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1616.983780] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1616.983973] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Deleting the datastore file [datastore1] b788e586-850b-46e7-a204-d80eac56cce7 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1616.984266] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb39d86f-daeb-49d9-bf43-f0adeb3e7139 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.990746] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for the task: (returnval){ [ 1616.990746] env[62816]: value = "task-1788586" [ 1616.990746] env[62816]: _type = "Task" 
[ 1616.990746] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.999019] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788586, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.245161] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788582, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.261915] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788583, 'name': PowerOffVM_Task, 'duration_secs': 0.32157} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.262336] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1617.262560] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1617.263327] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ec02ae9-90a7-4a83-af59-bbd0b8375890 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.270119] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1617.270367] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31fc7971-b6a5-460e-beda-9d9b5c154762 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.369817] env[62816]: DEBUG nova.network.neutron [-] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.403818] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.404157] env[62816]: 
DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Instance network_info: |[{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1617.404579] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:90:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bed0373b-9c6a-4357-a640-8218a972cb72', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1617.411942] env[62816]: DEBUG oslo.service.loopingcall [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.412165] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1617.412385] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd534740-deac-424c-9f0c-e9b1f5dc036b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.437757] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788585, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.438955] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1617.438955] env[62816]: value = "task-1788588" [ 1617.438955] env[62816]: _type = "Task" [ 1617.438955] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.442646] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1617.442851] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1617.443040] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleting the datastore file [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1617.443603] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3ae50a2-d2e6-4a1c-b2e0-11a63009db2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.448639] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788588, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.449728] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1617.449728] env[62816]: value = "task-1788589" [ 1617.449728] env[62816]: _type = "Task" [ 1617.449728] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.458273] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788589, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.500948] env[62816]: DEBUG oslo_vmware.api [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Task: {'id': task-1788586, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184015} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.501248] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1617.501439] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1617.501617] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1617.501793] env[62816]: INFO nova.compute.manager [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1617.502045] env[62816]: DEBUG oslo.service.loopingcall [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1617.502246] env[62816]: DEBUG nova.compute.manager [-] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1617.502337] env[62816]: DEBUG nova.network.neutron [-] [instance: b788e586-850b-46e7-a204-d80eac56cce7] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1617.623102] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.931s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.629408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.940s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.630891] env[62816]: INFO nova.compute.claims [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.748886] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788582, 'name': ReconfigVM_Task, 'duration_secs': 0.625003} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.749261] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Reconfigured VM instance instance-0000003d to attach disk [datastore1] a60d4ff0-af76-4489-840b-ff7f6c23b2ab/a60d4ff0-af76-4489-840b-ff7f6c23b2ab.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1617.749948] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba9c5715-8484-4454-abf2-4a04207c57e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.756244] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1617.756244] env[62816]: value = "task-1788590" [ 1617.756244] env[62816]: _type = "Task" [ 1617.756244] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.764137] env[62816]: DEBUG nova.compute.manager [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-changed-bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.764333] env[62816]: DEBUG nova.compute.manager [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing instance network info cache due to event network-changed-bed0373b-9c6a-4357-a640-8218a972cb72. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1617.764571] env[62816]: DEBUG oslo_concurrency.lockutils [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.764697] env[62816]: DEBUG oslo_concurrency.lockutils [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.764832] env[62816]: DEBUG nova.network.neutron [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing network info cache for port bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.769896] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788590, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.872581] env[62816]: INFO nova.compute.manager [-] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Took 1.33 seconds to deallocate network for instance. [ 1617.938655] env[62816]: DEBUG oslo_vmware.api [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788585, 'name': PowerOnVM_Task, 'duration_secs': 0.754494} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.938655] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1617.938835] env[62816]: INFO nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Took 10.45 seconds to spawn the instance on the hypervisor. 
[ 1617.939013] env[62816]: DEBUG nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1617.940236] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4591c003-329d-4006-b834-6c8015db11bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.964217] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788588, 'name': CreateVM_Task, 'duration_secs': 0.439294} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.966336] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1617.970030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.970030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.970030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1617.970030] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5cfb834d-0a98-440e-a7a2-ec66ac881aff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.974021] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788589, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134245} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.975221] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1617.975404] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1617.975590] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1617.979997] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1617.979997] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a83cee-8c55-0ad3-dc4a-9f333057ad83" [ 1617.979997] env[62816]: _type = "Task" [ 1617.979997] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.991121] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a83cee-8c55-0ad3-dc4a-9f333057ad83, 'name': SearchDatastore_Task, 'duration_secs': 0.009567} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.991406] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.991636] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1617.991861] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.992095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.992284] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1617.992529] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c953bada-da13-407a-abe1-153c5f693f0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.000651] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1618.000835] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1618.001973] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ec2326b-d35f-41a7-87e3-2b90adae4ce7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.006990] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1618.006990] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52778d06-bb08-3c95-177f-25a6f8dc79e6" [ 1618.006990] env[62816]: _type = "Task" [ 1618.006990] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.015685] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52778d06-bb08-3c95-177f-25a6f8dc79e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.267850] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788590, 'name': Rename_Task, 'duration_secs': 0.232244} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.268148] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1618.268547] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a75cbbd-ead9-4714-ae01-05f675676ba5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.277072] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1618.277072] env[62816]: value = "task-1788591" [ 1618.277072] env[62816]: _type = "Task" [ 1618.277072] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.283947] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788591, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.379383] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1618.471090] env[62816]: INFO nova.compute.manager [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Took 42.79 seconds to build instance. [ 1618.471090] env[62816]: DEBUG nova.network.neutron [-] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.519137] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52778d06-bb08-3c95-177f-25a6f8dc79e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.520468] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fd93ef1-a386-4033-86ec-ac5c329ad8b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.526883] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1618.526883] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52eef6c5-2649-c8da-e9b8-da2725d43462" [ 1618.526883] env[62816]: _type = "Task" [ 1618.526883] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.536202] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52eef6c5-2649-c8da-e9b8-da2725d43462, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.541159] env[62816]: DEBUG nova.network.neutron [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updated VIF entry in instance network info cache for port bed0373b-9c6a-4357-a640-8218a972cb72. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.541519] env[62816]: DEBUG nova.network.neutron [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.786323] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788591, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.974222] env[62816]: INFO nova.compute.manager [-] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Took 1.47 seconds to deallocate network for instance. 
[ 1618.974786] env[62816]: DEBUG oslo_concurrency.lockutils [None req-86d9402a-48f8-42d7-ab49-bab0b9297fb7 tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.310s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1618.994746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa1418c-e1f1-43a8-b596-4f9d271f0124 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.002990] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bed8bc-764e-4fd9-a6ab-15ea481ad3e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.041025] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1619.041315] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1619.041478] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1619.041664] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1619.041816] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1619.041967] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1619.042193] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1619.042461] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1619.042664] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1619.042840] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1619.043064] env[62816]: DEBUG nova.virt.hardware [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1619.046704] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f75213b-1e41-4223-b9af-f2ed05576f6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.049801] env[62816]: DEBUG oslo_concurrency.lockutils [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.050133] env[62816]: DEBUG nova.compute.manager [req-45162378-3b0a-456d-bf2e-90f76b85929b req-ef648bb4-11f9-404c-8689-1b785a00655b service nova] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Received event network-vif-deleted-933a5fba-0d17-4a7e-ba84-e96d67fb89c2 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1619.051010] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6aaf29f-5ef6-4d04-8216-8a4309b4d14f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.059513] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52eef6c5-2649-c8da-e9b8-da2725d43462, 'name': SearchDatastore_Task, 'duration_secs': 0.010151} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.063500] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.063738] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] dd833e38-691c-4757-9c6b-659c74343d3e/dd833e38-691c-4757-9c6b-659c74343d3e.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1619.064068] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b509001-4d1e-4b5a-91a3-2e26ec9552bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.067064] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4cf091-7a74-48d8-ba2d-74d692224413 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.071525] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c256a4b2-798c-4c63-806a-5f5b0179a1a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.095388] env[62816]: DEBUG nova.compute.provider_tree [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.097585] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:cb:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37434b93-dfdc-4a3f-bf5a-9f2cbe25a754', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2b6a990-a634-4e68-ba4c-886b856209a5', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1619.105058] env[62816]: DEBUG oslo.service.loopingcall [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.106796] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1619.107155] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1619.107155] env[62816]: value = "task-1788592" [ 1619.107155] env[62816]: _type = "Task" [ 1619.107155] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.109334] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89a871b4-a4aa-484d-9552-de2ef2ecd907 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.133562] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788592, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.134907] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1619.134907] env[62816]: value = "task-1788593" [ 1619.134907] env[62816]: _type = "Task" [ 1619.134907] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.143696] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788593, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.171191] env[62816]: INFO nova.compute.manager [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Swapping old allocation on dict_keys(['27f49c85-1bb9-4d17-a914-e2f45a5e84fa']) held by migration 7b53fd5b-adc7-497d-8fbe-fa8da06269e9 for instance [ 1619.202796] env[62816]: DEBUG nova.scheduler.client.report [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Overwriting current allocation {'allocations': {'27f49c85-1bb9-4d17-a914-e2f45a5e84fa': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 90}}, 'project_id': 'f981032701b04b14841045ed05cbe9a6', 'user_id': 'f062fc536a1c4bbeabcb41197b1bc4fd', 'consumer_generation': 1} on consumer 9bda24c6-f950-47ff-ad3c-ff745291870c {{(pid=62816) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1619.287363] env[62816]: DEBUG oslo_vmware.api [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788591, 'name': PowerOnVM_Task, 'duration_secs': 0.957796} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.287674] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1619.287969] env[62816]: INFO nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Took 9.25 seconds to spawn the instance on the hypervisor. [ 1619.288198] env[62816]: DEBUG nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1619.289247] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365b0a90-3f09-4d0f-9bcf-83db3c9e2a80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.304018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1619.304018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquired lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1619.304018] env[62816]: DEBUG nova.network.neutron [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1619.314799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "ede88298-0eae-4471-b602-c26b5fa7a72a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.315072] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.315287] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "ede88298-0eae-4471-b602-c26b5fa7a72a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.315470] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.315707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.317941] env[62816]: INFO nova.compute.manager [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Terminating instance [ 1619.322269] env[62816]: DEBUG nova.compute.manager [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1619.322479] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1619.323658] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1edc3e4-a014-483d-8d51-9088fdf67560 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.333976] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1619.334270] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-548fb519-d303-4c73-8ade-5ee828a1fe07 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.341634] env[62816]: DEBUG oslo_vmware.api [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1619.341634] env[62816]: value = "task-1788594" [ 1619.341634] env[62816]: _type = "Task" [ 1619.341634] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.349708] env[62816]: DEBUG oslo_vmware.api [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788594, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.484346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.607812] env[62816]: DEBUG nova.scheduler.client.report [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1619.634034] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788592, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452488} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.634256] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] dd833e38-691c-4757-9c6b-659c74343d3e/dd833e38-691c-4757-9c6b-659c74343d3e.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1619.634471] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1619.634726] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-15c1d013-4feb-4b10-a140-7892dee0b876 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.645269] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788593, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.646287] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1619.646287] env[62816]: value = "task-1788595" [ 1619.646287] env[62816]: _type = "Task" [ 1619.646287] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.654159] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788595, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.788982] env[62816]: DEBUG nova.compute.manager [req-fb61318d-e45e-40c7-abe9-14ac8a2a759f req-da12d8bc-a81c-4abf-b58a-e5d1af69aad9 service nova] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Received event network-vif-deleted-3e105313-389a-4820-a69d-f8e130383f4f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1619.812900] env[62816]: INFO nova.compute.manager [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Took 42.91 seconds to build instance. [ 1619.850765] env[62816]: DEBUG oslo_vmware.api [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788594, 'name': PowerOffVM_Task, 'duration_secs': 0.305588} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.851109] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1619.851334] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1619.851602] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5579147e-13b9-4437-8457-601b0f62cb60 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.973316] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1619.973624] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1619.973827] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 
tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Deleting the datastore file [datastore1] ede88298-0eae-4471-b602-c26b5fa7a72a {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1619.976631] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-150b0e0e-76aa-4342-a54c-f7bb09619402 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.982885] env[62816]: DEBUG oslo_vmware.api [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for the task: (returnval){ [ 1619.982885] env[62816]: value = "task-1788597" [ 1619.982885] env[62816]: _type = "Task" [ 1619.982885] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.990466] env[62816]: DEBUG oslo_vmware.api [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788597, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.084580] env[62816]: DEBUG nova.network.neutron [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [{"id": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "address": "fa:16:3e:ed:42:11", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.96", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b2b9d44-f6", "ovs_interfaceid": "5b2b9d44-f66e-428f-a75c-6e213ebdb364", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1620.113400] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.484s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.114151] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 
9c246982-b215-46c1-9cd3-63907a515086] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1620.117406] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.060s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.118048] env[62816]: DEBUG nova.objects.instance [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lazy-loading 'resources' on Instance uuid e1067d45-1938-4021-b902-21a1aa57058a {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1620.150063] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788593, 'name': CreateVM_Task, 'duration_secs': 0.534229} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.150063] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1620.151018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.151328] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.151728] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1620.152088] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28b0ba3e-fc0d-42d0-9d11-646dff319892 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.160718] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788595, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063729} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.160718] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1620.161215] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1620.161215] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5281b2df-5a95-6d0a-6801-5f71aaa82cb7" [ 1620.161215] env[62816]: _type = "Task" [ 1620.161215] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.161908] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2381be0b-8176-49a0-ba96-507907db8ace {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.173704] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281b2df-5a95-6d0a-6801-5f71aaa82cb7, 'name': SearchDatastore_Task, 'duration_secs': 0.01015} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.185099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.185358] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.185591] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.185767] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.185955] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 
tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1620.196134] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] dd833e38-691c-4757-9c6b-659c74343d3e/dd833e38-691c-4757-9c6b-659c74343d3e.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1620.196280] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dd17a23-f16b-4235-b9c9-c545b185dbc9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.198239] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c253ad8-adc8-4328-a773-9e81f6bf6b4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.222519] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1620.222519] env[62816]: value = "task-1788598" [ 1620.222519] env[62816]: _type = "Task" [ 1620.222519] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.228016] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1620.228228] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1620.229691] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a29c05a-78d8-4c5f-9d00-99c55e8d449a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.235200] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788598, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.238264] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1620.238264] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528c5c5b-b879-271e-8f8d-80215df23fab" [ 1620.238264] env[62816]: _type = "Task" [ 1620.238264] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.246051] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528c5c5b-b879-271e-8f8d-80215df23fab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.315416] env[62816]: DEBUG oslo_concurrency.lockutils [None req-27f218be-7853-4ac4-8b0d-f4b5521b65a5 tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.422s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.493449] env[62816]: DEBUG oslo_vmware.api [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Task: {'id': task-1788597, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134841} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.493767] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1620.495240] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1620.495240] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1620.495240] env[62816]: INFO nova.compute.manager [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1620.495517] env[62816]: DEBUG oslo.service.loopingcall [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1620.496094] env[62816]: DEBUG nova.compute.manager [-] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1620.496207] env[62816]: DEBUG nova.network.neutron [-] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1620.587913] env[62816]: DEBUG oslo_concurrency.lockutils [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Releasing lock "refresh_cache-9bda24c6-f950-47ff-ad3c-ff745291870c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.588463] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1620.588747] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c68d848d-5def-43bb-aad9-9a6ec12e2bef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.595729] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1620.595729] env[62816]: value = "task-1788599" [ 1620.595729] env[62816]: _type = "Task" [ 1620.595729] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.603825] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.621283] env[62816]: DEBUG nova.compute.utils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1620.626021] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1620.626021] env[62816]: DEBUG nova.network.neutron [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1620.694603] env[62816]: DEBUG nova.policy [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '894800c94e634a3e861316db6f882f2f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b28bf1d21ff41fca85f02679ce1d4fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1620.719576] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.719826] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.722387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.722387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.722387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.735475] env[62816]: INFO nova.compute.manager [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Terminating instance [ 1620.737716] env[62816]: DEBUG nova.compute.manager [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1620.737915] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1620.740572] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302bee52-0219-4bf8-bb4b-122fcbb3e34c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.749944] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788598, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.760936] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528c5c5b-b879-271e-8f8d-80215df23fab, 'name': SearchDatastore_Task, 'duration_secs': 0.03613} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.763801] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1620.764042] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a277da9-ea63-419d-a239-f7fcc7cbb8b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.766202] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eaff5ba0-6e0b-4e09-9bfb-955a5ad6517a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.772936] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1620.772936] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f91634-58a0-07a7-e6fa-798b426e5b9d" [ 1620.772936] env[62816]: _type = "Task" [ 1620.772936] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.777028] env[62816]: DEBUG oslo_vmware.api [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1620.777028] env[62816]: value = "task-1788600" [ 1620.777028] env[62816]: _type = "Task" [ 1620.777028] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.782832] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f91634-58a0-07a7-e6fa-798b426e5b9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.791256] env[62816]: DEBUG oslo_vmware.api [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788600, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.003693] env[62816]: DEBUG nova.network.neutron [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Successfully created port: 7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.042024] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ce420d-f8c9-4505-887c-ec1f1efa4485 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.048070] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fd4e49-93e7-4b8b-a567-4ba2859341a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.077926] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d53b329-0828-45ba-af8c-0a69d7d5c3e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.085383] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba9f68b-fa2b-46d9-a0e4-0a719aafd24c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.101797] env[62816]: DEBUG nova.compute.provider_tree [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.108094] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788599, 'name': PowerOffVM_Task, 'duration_secs': 0.403109} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.108913] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1621.109659] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0af33be3-1673-42f1-a298-c50b616c7610',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2008575729',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1621.109780] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1621.109934] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1621.110128] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1621.110270] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1621.110414] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1621.110611] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1621.110776] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 
tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1621.110936] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1621.111103] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1621.111273] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1621.116510] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c3e057d-8238-4455-87cc-87a29e346f25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.130184] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1621.134298] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1621.134298] env[62816]: value = "task-1788601" [ 1621.134298] env[62816]: _type = "Task" [ 1621.134298] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.143308] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788601, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.233812] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788598, 'name': ReconfigVM_Task, 'duration_secs': 0.841076} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.234411] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfigured VM instance instance-0000003e to attach disk [datastore1] dd833e38-691c-4757-9c6b-659c74343d3e/dd833e38-691c-4757-9c6b-659c74343d3e.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1621.235200] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5151cd95-c9ef-4f26-86a0-090427ff105e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.244020] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1621.244020] env[62816]: value = "task-1788602" [ 1621.244020] env[62816]: _type = "Task" [ 1621.244020] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.251655] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788602, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.287286] env[62816]: DEBUG oslo_vmware.api [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788600, 'name': PowerOffVM_Task, 'duration_secs': 0.211413} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.290574] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1621.290754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1621.291015] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f91634-58a0-07a7-e6fa-798b426e5b9d, 'name': SearchDatastore_Task, 'duration_secs': 0.022235} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.291221] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4891ca92-633b-4f98-820a-bede3e393594 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.292706] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.292966] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1621.293221] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-77ae340b-ddc0-4eeb-a0c2-f1e16c85bcfb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.301432] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1621.301432] env[62816]: value = "task-1788604" [ 1621.301432] env[62816]: _type = "Task" [ 1621.301432] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.309233] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788604, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.313628] env[62816]: DEBUG nova.network.neutron [-] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1621.401181] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1621.401181] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1621.401410] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleting the datastore file [datastore1] a60d4ff0-af76-4489-840b-ff7f6c23b2ab {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1621.401615] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2867e8a9-5bd3-45d0-b90a-31444b96a446 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.408610] env[62816]: DEBUG oslo_vmware.api [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for the task: (returnval){ [ 1621.408610] env[62816]: value = "task-1788605" [ 1621.408610] env[62816]: _type = "Task" [ 1621.408610] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.416255] env[62816]: DEBUG oslo_vmware.api [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.605898] env[62816]: DEBUG nova.scheduler.client.report [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1621.649249] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788601, 'name': ReconfigVM_Task, 'duration_secs': 0.146297} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.650186] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4119a74-c7a3-4c6d-8c16-694965713e70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.678914] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:52:03Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='0af33be3-1673-42f1-a298-c50b616c7610',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2008575729',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1621.679303] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1621.679521] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1621.679859] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1621.679996] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 
tempest-MigrationsAdminTest-666052295-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1621.680247] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1621.680565] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1621.680800] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1621.681056] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1621.681427] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1621.681525] env[62816]: DEBUG nova.virt.hardware [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1621.683705] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac4b52be-4761-4f2c-8d86-cee05983993e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.691997] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1621.691997] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5282f06b-fb31-8e99-ec23-71cc52ca7b31" [ 1621.691997] env[62816]: _type = "Task" [ 1621.691997] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.704092] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5282f06b-fb31-8e99-ec23-71cc52ca7b31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.753129] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788602, 'name': Rename_Task, 'duration_secs': 0.137575} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.753421] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1621.753704] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c65adbcb-c957-4607-a57e-f51cb55f83fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.761043] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1621.761043] env[62816]: value = "task-1788606" [ 1621.761043] env[62816]: _type = "Task" [ 1621.761043] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.768910] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788606, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.812012] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788604, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475493} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.812318] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1621.812532] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1621.813793] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f8e9710-cced-4341-9f3e-386c6edb399f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.816674] env[62816]: DEBUG nova.compute.manager [req-e480bb92-da0e-4fef-8faa-37e97fc99ec8 req-ff74d7a4-183b-46a1-b153-39e90923d17f service nova] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Received event network-vif-deleted-74b0ab45-5abc-4f73-9e97-70674d2c1841 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1621.817413] env[62816]: INFO nova.compute.manager [-] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Took 1.32 seconds to deallocate network for instance. [ 1621.824079] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1621.824079] env[62816]: value = "task-1788607" [ 1621.824079] env[62816]: _type = "Task" [ 1621.824079] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.831933] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788607, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.920834] env[62816]: DEBUG oslo_vmware.api [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Task: {'id': task-1788605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442556} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.921217] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1621.921463] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1621.921765] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1621.922074] env[62816]: INFO nova.compute.manager [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1621.922437] env[62816]: DEBUG oslo.service.loopingcall [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1621.922712] env[62816]: DEBUG nova.compute.manager [-] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1621.922858] env[62816]: DEBUG nova.network.neutron [-] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1622.111298] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.113572] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.906s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.113835] env[62816]: DEBUG nova.objects.instance [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lazy-loading 'resources' on Instance uuid 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1622.131382] env[62816]: INFO nova.scheduler.client.report [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Deleted allocations for instance e1067d45-1938-4021-b902-21a1aa57058a [ 1622.145884] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1622.175929] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1622.176220] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1622.176390] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.176577] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1622.176734] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.176879] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1622.177100] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1622.177289] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1622.177420] env[62816]: DEBUG 
nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1622.177582] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1622.177754] env[62816]: DEBUG nova.virt.hardware [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1622.178894] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d6feca-a599-4f2d-a2bd-f050238f7b70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.187054] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f2ec50-d0e5-4764-a8b1-ac8b50ea4bc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.210572] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5282f06b-fb31-8e99-ec23-71cc52ca7b31, 'name': SearchDatastore_Task, 'duration_secs': 0.032346} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.216015] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfiguring VM instance instance-0000002b to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1622.216506] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-790ca2f1-3537-4cd1-955d-d0ac87413626 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.235090] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1622.235090] env[62816]: value = "task-1788608" [ 1622.235090] env[62816]: _type = "Task" [ 1622.235090] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.244187] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788608, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.270851] env[62816]: DEBUG oslo_vmware.api [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1788606, 'name': PowerOnVM_Task, 'duration_secs': 0.478869} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.271340] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1622.271432] env[62816]: INFO nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Took 7.62 seconds to spawn the instance on the hypervisor. [ 1622.271542] env[62816]: DEBUG nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1622.272330] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dccff84-27e7-455c-8a4e-6ed9e18d9315 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.323393] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.334191] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788607, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065806} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.334467] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1622.335253] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb192f7a-e086-4c69-9ae1-73d549475d5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.357851] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1622.358321] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62bced45-5440-4533-8fb5-0b1599945a1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.379165] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1622.379165] env[62816]: value = "task-1788609" [ 1622.379165] env[62816]: _type = "Task" [ 1622.379165] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.387507] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788609, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.547672] env[62816]: DEBUG nova.network.neutron [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Successfully updated port: 7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1622.616489] env[62816]: DEBUG nova.objects.instance [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lazy-loading 'numa_topology' on Instance uuid 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1622.642086] env[62816]: DEBUG oslo_concurrency.lockutils [None req-34b81595-e2a9-4f84-a6dc-88c1c63ceb92 tempest-ListImageFiltersTestJSON-413507774 tempest-ListImageFiltersTestJSON-413507774-project-member] Lock "e1067d45-1938-4021-b902-21a1aa57058a" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 28.040s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.653995] env[62816]: DEBUG nova.network.neutron [-] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.745858] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788608, 'name': ReconfigVM_Task, 'duration_secs': 0.198398} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.746126] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfigured VM instance instance-0000002b to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1622.746960] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca33d496-edea-49aa-8a04-bc46dc23396c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.770045] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1622.770379] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab6fc08b-f9c4-4fb6-a7b8-b9151f907b14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.795800] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1622.795800] env[62816]: value = "task-1788610" [ 1622.795800] env[62816]: _type = "Task" [ 1622.795800] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.796259] env[62816]: INFO nova.compute.manager [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Took 42.34 seconds to build instance. [ 1622.807247] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788610, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.892292] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788609, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.050314] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "refresh_cache-9c246982-b215-46c1-9cd3-63907a515086" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.051314] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquired lock "refresh_cache-9c246982-b215-46c1-9cd3-63907a515086" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.053456] env[62816]: DEBUG nova.network.neutron [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1623.120181] env[62816]: DEBUG nova.objects.base [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Object Instance<679cd9a3-2ed6-451f-b934-ba7738913959> lazy-loaded attributes: resources,numa_topology {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1623.139889] env[62816]: DEBUG nova.compute.manager [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-changed-bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.140078] env[62816]: DEBUG nova.compute.manager [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing instance network info cache due to event network-changed-bed0373b-9c6a-4357-a640-8218a972cb72. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1623.140349] env[62816]: DEBUG oslo_concurrency.lockutils [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.140533] env[62816]: DEBUG oslo_concurrency.lockutils [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.140695] env[62816]: DEBUG nova.network.neutron [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing network info cache for port bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1623.156882] env[62816]: INFO nova.compute.manager [-] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Took 1.23 seconds to deallocate network for instance. 
[ 1623.304454] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4681cc8-58bc-4b36-9baf-98f9c328b76f tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "dd833e38-691c-4757-9c6b-659c74343d3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 43.862s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.310725] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788610, 'name': ReconfigVM_Task, 'duration_secs': 0.281847} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.311105] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c/9bda24c6-f950-47ff-ad3c-ff745291870c.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1623.312027] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1957a992-0f6f-4666-9fc7-339780279a1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.338891] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485664cf-6d3a-492c-8cb4-9da2fbf52574 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.363509] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc4a626-6e89-4284-b5f2-f8be46b57392 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.389513] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0702e00e-20ea-4ad0-8f6f-69abd8dad1fe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.397229] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1623.400254] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5d164fc-9136-46cf-9d0d-accee5533a41 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.401736] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788609, 'name': ReconfigVM_Task, 'duration_secs': 1.009127} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.402064] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c/0e0261fe-4376-487c-9d54-c4f37577409c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1623.403052] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c18aa31-31ed-411b-8b9b-74cbadd948a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.409289] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1623.409289] env[62816]: value = "task-1788611" [ 1623.409289] env[62816]: _type = "Task" [ 1623.409289] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.413177] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1623.413177] env[62816]: value = "task-1788612" [ 1623.413177] env[62816]: _type = "Task" [ 1623.413177] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.421601] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788611, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.426579] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788612, 'name': Rename_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.533877] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a92394dc-bbb9-481c-8234-9b359dd5ac97 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.541618] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5af297-e719-4dbc-a650-884fc0dd13d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.574166] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a760e2d-ca8e-4829-afdf-95fe10e25486 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.583212] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e13da5e-cd30-48a1-88ed-1817cc96d08d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.599035] env[62816]: DEBUG nova.compute.provider_tree [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.618248] env[62816]: DEBUG nova.network.neutron [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1623.668323] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.848120] env[62816]: DEBUG nova.compute.manager [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Received event network-vif-deleted-75743f5b-f0a7-4280-97cb-0d12ccb870c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.848120] env[62816]: DEBUG nova.compute.manager [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Received event network-vif-plugged-7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.848120] env[62816]: DEBUG oslo_concurrency.lockutils [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] Acquiring lock "9c246982-b215-46c1-9cd3-63907a515086-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.848120] env[62816]: DEBUG oslo_concurrency.lockutils [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] Lock "9c246982-b215-46c1-9cd3-63907a515086-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.848834] env[62816]: DEBUG oslo_concurrency.lockutils [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] Lock "9c246982-b215-46c1-9cd3-63907a515086-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.849339] env[62816]: DEBUG nova.compute.manager [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] No waiting events found dispatching network-vif-plugged-7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1623.849682] env[62816]: WARNING nova.compute.manager [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Received unexpected event network-vif-plugged-7e97b5a8-3b7b-4ccf-accb-c84c06259813 for instance with vm_state building and task_state spawning. 
[ 1623.851025] env[62816]: DEBUG nova.compute.manager [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Received event network-changed-7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.851025] env[62816]: DEBUG nova.compute.manager [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Refreshing instance network info cache due to event network-changed-7e97b5a8-3b7b-4ccf-accb-c84c06259813. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1623.851025] env[62816]: DEBUG oslo_concurrency.lockutils [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] Acquiring lock "refresh_cache-9c246982-b215-46c1-9cd3-63907a515086" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.927015] env[62816]: DEBUG oslo_vmware.api [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788611, 'name': PowerOnVM_Task, 'duration_secs': 0.389642} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.927856] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1623.935357] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788612, 'name': Rename_Task, 'duration_secs': 0.213804} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.935357] env[62816]: DEBUG nova.network.neutron [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Updating instance_info_cache with network_info: [{"id": "7e97b5a8-3b7b-4ccf-accb-c84c06259813", "address": "fa:16:3e:89:7c:c4", "network": {"id": "649e621e-a31f-42f8-b5f9-829d86163ed3", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-976997504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b28bf1d21ff41fca85f02679ce1d4fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e97b5a8-3b", "ovs_interfaceid": "7e97b5a8-3b7b-4ccf-accb-c84c06259813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1623.935357] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1623.935493] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9769510b-73a5-4130-bc2b-878a73144baf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.942625] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1623.942625] env[62816]: value = "task-1788613" [ 1623.942625] env[62816]: _type = "Task" [ 1623.942625] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.951851] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.044782] env[62816]: DEBUG nova.network.neutron [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updated VIF entry in instance network info cache for port bed0373b-9c6a-4357-a640-8218a972cb72. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1624.045555] env[62816]: DEBUG nova.network.neutron [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.101633] env[62816]: DEBUG nova.scheduler.client.report [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.440421] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Releasing lock "refresh_cache-9c246982-b215-46c1-9cd3-63907a515086" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.440784] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Instance network_info: |[{"id": "7e97b5a8-3b7b-4ccf-accb-c84c06259813", "address": "fa:16:3e:89:7c:c4", "network": {"id": "649e621e-a31f-42f8-b5f9-829d86163ed3", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-976997504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b28bf1d21ff41fca85f02679ce1d4fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e97b5a8-3b", "ovs_interfaceid": "7e97b5a8-3b7b-4ccf-accb-c84c06259813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1624.441345] env[62816]: DEBUG oslo_concurrency.lockutils [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] Acquired lock "refresh_cache-9c246982-b215-46c1-9cd3-63907a515086" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.441600] env[62816]: DEBUG nova.network.neutron [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Refreshing network info cache for port 7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1624.442626] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:7c:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '50cf0a70-948d-4611-af05-94c1483064ed', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e97b5a8-3b7b-4ccf-accb-c84c06259813', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1624.450792] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Creating folder: Project (0b28bf1d21ff41fca85f02679ce1d4fe). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1624.451442] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-749e0e64-be9f-4537-ad6c-65b0a1738131 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.464643] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788613, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.467550] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Created folder: Project (0b28bf1d21ff41fca85f02679ce1d4fe) in parent group-v370905. 
[ 1624.467821] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Creating folder: Instances. Parent ref: group-v371082. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1624.468123] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b76aecdf-d9ac-4688-bad1-bd06e1eb61fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.477553] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Created folder: Instances in parent group-v371082. [ 1624.477794] env[62816]: DEBUG oslo.service.loopingcall [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1624.477985] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1624.478496] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0703914-8dfc-410c-9645-dc6f58c8789c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.499494] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1624.499494] env[62816]: value = "task-1788616" [ 1624.499494] env[62816]: _type = "Task" [ 1624.499494] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.507466] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788616, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.548182] env[62816]: DEBUG oslo_concurrency.lockutils [req-ee806575-5a73-4228-a8a0-b8a12dd529e4 req-583bf848-8651-4b5f-ae9f-9af179635c67 service nova] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.609213] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.496s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.612489] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.981s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.613988] env[62816]: INFO nova.compute.claims [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1624.944019] env[62816]: INFO nova.compute.manager [None req-41e0140d-49da-46f5-be79-c4dc1cce9f06 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance to original state: 'active' [ 1624.967785] env[62816]: DEBUG oslo_vmware.api [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788613, 'name': PowerOnVM_Task, 'duration_secs': 0.793596} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.968678] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1624.969528] env[62816]: DEBUG nova.compute.manager [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1624.973145] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8936e148-774a-403c-8581-83252f818e6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.017798] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788616, 'name': CreateVM_Task, 'duration_secs': 0.362933} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.018088] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1625.019592] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.019832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.020264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1625.020802] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5bb5b8b-d414-41e9-852e-845348d634da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.027194] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1625.027194] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52588888-e770-1183-396b-61e8d50605a1" [ 1625.027194] env[62816]: _type = "Task" [ 1625.027194] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.037780] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52588888-e770-1183-396b-61e8d50605a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.126406] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7ba71cee-4369-4816-b152-93a67e54c5ab tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 45.958s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.128306] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 24.568s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.128807] env[62816]: INFO nova.compute.manager [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Unshelving [ 1625.268021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.268021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.268021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "1c3392d3-cfb0-47c6-9366-8c363ad21297-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.268021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.268021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.269514] env[62816]: INFO nova.compute.manager [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Terminating instance [ 1625.272644] env[62816]: DEBUG nova.compute.manager [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1625.272851] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1625.274091] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a88997-b54c-4085-b27e-0080a1383f4b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.283642] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1625.284042] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceae3f79-f76b-44f7-85ba-b1c7c5d7a48c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.292475] env[62816]: DEBUG oslo_vmware.api [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1625.292475] env[62816]: value = "task-1788617" [ 1625.292475] env[62816]: _type = "Task" [ 1625.292475] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.305474] env[62816]: DEBUG oslo_vmware.api [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788617, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.494857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.542311] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52588888-e770-1183-396b-61e8d50605a1, 'name': SearchDatastore_Task, 'duration_secs': 0.011732} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.542814] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.542941] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1625.543598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.545493] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.545892] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1625.546031] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7158882e-1b3e-46ed-9ed5-28e9bc42b203 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.557277] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1625.557277] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1625.558069] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c861ce2-4833-4f44-bf5a-829bfa9cfaa3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.563377] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1625.563377] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525f1bde-a959-117b-ca82-1bd3c0068d62" [ 1625.563377] env[62816]: _type = "Task" [ 1625.563377] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.571081] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525f1bde-a959-117b-ca82-1bd3c0068d62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.602236] env[62816]: DEBUG nova.network.neutron [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Updated VIF entry in instance network info cache for port 7e97b5a8-3b7b-4ccf-accb-c84c06259813. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1625.602476] env[62816]: DEBUG nova.network.neutron [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Updating instance_info_cache with network_info: [{"id": "7e97b5a8-3b7b-4ccf-accb-c84c06259813", "address": "fa:16:3e:89:7c:c4", "network": {"id": "649e621e-a31f-42f8-b5f9-829d86163ed3", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-976997504-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0b28bf1d21ff41fca85f02679ce1d4fe", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "50cf0a70-948d-4611-af05-94c1483064ed", "external-id": "nsx-vlan-transportzone-536", "segmentation_id": 536, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e97b5a8-3b", "ovs_interfaceid": "7e97b5a8-3b7b-4ccf-accb-c84c06259813", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.807040] env[62816]: DEBUG oslo_vmware.api [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788617, 'name': PowerOffVM_Task, 'duration_secs': 0.456239} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.810400] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1625.811107] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1625.813673] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3aa38805-03a9-4a66-b86a-ee0680c8f72e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.071454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a695a1e-fccd-4ebf-9caa-a0e6623b0214 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.079096] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525f1bde-a959-117b-ca82-1bd3c0068d62, 'name': SearchDatastore_Task, 'duration_secs': 0.01921} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.082854] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1626.083091] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1626.083297] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Deleting the datastore file [datastore1] 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1626.089473] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-505d1ac4-9538-40b3-8c52-428406d47d52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.092681] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c15e0865-dd8d-40c4-8ea6-8c960ed91bc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.096484] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1604f8c9-f8f8-4831-880d-3252715d269c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.106094] env[62816]: DEBUG oslo_concurrency.lockutils [req-51f05b3f-86c3-44e4-93c1-4309620dc197 req-0bfafca2-7cfc-4ecc-add3-82e1bead6566 service nova] Releasing lock "refresh_cache-9c246982-b215-46c1-9cd3-63907a515086" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.107806] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1626.107806] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52587a49-5284-f619-3acc-b10e30e07492" [ 1626.107806] env[62816]: _type = "Task" [ 1626.107806] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.158017] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbf53c0-97fc-4cd6-9ca3-df614d57e96c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.161925] env[62816]: DEBUG oslo_vmware.api [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1626.161925] env[62816]: value = "task-1788619" [ 1626.161925] env[62816]: _type = "Task" [ 1626.161925] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.173255] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52587a49-5284-f619-3acc-b10e30e07492, 'name': SearchDatastore_Task, 'duration_secs': 0.015863} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.174789] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.175647] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.175958] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9c246982-b215-46c1-9cd3-63907a515086/9c246982-b215-46c1-9cd3-63907a515086.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1626.177361] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab23afe-ec7d-436c-a2ce-06149d45234a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.184848] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16dbee3b-7ec8-4e37-83f6-39071fd4d65e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.186412] env[62816]: DEBUG oslo_vmware.api [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.198901] env[62816]: DEBUG nova.compute.provider_tree [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.202443] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1626.202443] env[62816]: value = "task-1788620" [ 1626.202443] env[62816]: _type = "Task" [ 1626.202443] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.214433] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788620, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.452530] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "65e97c6a-5d8f-4241-9095-65a5a6132a69" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.452810] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.453046] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "65e97c6a-5d8f-4241-9095-65a5a6132a69-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.453234] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.453407] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.456300] env[62816]: INFO nova.compute.manager [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Terminating instance [ 1626.458215] env[62816]: DEBUG nova.compute.manager [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1626.458424] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1626.459280] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e2ec6e-cb2e-4c1d-a214-8f5cfb5a1d99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.468831] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1626.469132] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-771e6039-2c71-4d5f-8913-1a15c281b549 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.478019] env[62816]: DEBUG oslo_vmware.api [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1626.478019] env[62816]: value = "task-1788621" [ 1626.478019] env[62816]: _type = "Task" [ 1626.478019] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.490218] env[62816]: DEBUG oslo_vmware.api [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788621, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.675063] env[62816]: DEBUG oslo_vmware.api [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176407} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.675063] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1626.675421] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1626.676670] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1626.676670] env[62816]: INFO nova.compute.manager [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1626.676670] env[62816]: DEBUG oslo.service.loopingcall [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1626.676670] env[62816]: DEBUG nova.compute.manager [-] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1626.677040] env[62816]: DEBUG nova.network.neutron [-] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1626.708719] env[62816]: DEBUG nova.scheduler.client.report [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1626.713113] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "9bda24c6-f950-47ff-ad3c-ff745291870c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.713507] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.713734] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1626.713922] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.714164] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.722951] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788620, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51493} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.723553] env[62816]: INFO nova.compute.manager [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Terminating instance [ 1626.727203] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9c246982-b215-46c1-9cd3-63907a515086/9c246982-b215-46c1-9cd3-63907a515086.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.727443] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.728395] env[62816]: DEBUG nova.compute.manager [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1626.728594] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1626.728845] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79702b8f-3063-47fb-9cec-d7a77bbdd97b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.732227] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fcdebe-84c1-468c-880c-c3f035fca982 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.741631] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1626.743010] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87353252-08b2-4b3d-a77a-715c8016b065 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.744668] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1626.744668] env[62816]: value = "task-1788622" [ 1626.744668] env[62816]: _type = "Task" [ 1626.744668] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.751188] env[62816]: DEBUG oslo_vmware.api [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1626.751188] env[62816]: value = "task-1788623" [ 1626.751188] env[62816]: _type = "Task" [ 1626.751188] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.759382] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788622, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.764577] env[62816]: DEBUG oslo_vmware.api [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788623, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.994544] env[62816]: DEBUG oslo_vmware.api [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788621, 'name': PowerOffVM_Task, 'duration_secs': 0.227672} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.997630] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1626.997895] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1626.998599] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-978b2ac6-41ec-42af-9caf-b235dcd33b9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.131357] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1627.131664] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1627.132269] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 
tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleting the datastore file [datastore1] 65e97c6a-5d8f-4241-9095-65a5a6132a69 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1627.134069] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0fd9a76-5254-4908-bb93-9161eb7fac09 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.145190] env[62816]: DEBUG oslo_vmware.api [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1627.145190] env[62816]: value = "task-1788625" [ 1627.145190] env[62816]: _type = "Task" [ 1627.145190] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.154320] env[62816]: DEBUG oslo_vmware.api [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788625, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.218088] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.219218] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1627.220925] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.947s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.222966] env[62816]: INFO nova.compute.claims [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1627.240885] env[62816]: DEBUG nova.compute.manager [req-c06bcfe4-79fb-46eb-9b6e-a001e481f37e req-5feb47c9-dcf8-46ff-93c2-3c5d49104394 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Received event network-vif-deleted-1110b9ce-766b-4ab4-b75f-4e0139f78297 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1627.240885] env[62816]: INFO nova.compute.manager [req-c06bcfe4-79fb-46eb-9b6e-a001e481f37e req-5feb47c9-dcf8-46ff-93c2-3c5d49104394 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Neutron deleted interface 1110b9ce-766b-4ab4-b75f-4e0139f78297; detaching it from the instance and deleting it from the info cache [ 1627.240885] env[62816]: DEBUG nova.network.neutron [req-c06bcfe4-79fb-46eb-9b6e-a001e481f37e req-5feb47c9-dcf8-46ff-93c2-3c5d49104394 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.255631] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788622, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064229} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.259403] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1627.260693] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec75e20c-53c8-4f5d-becd-15fd2fc35fc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.270434] env[62816]: DEBUG oslo_vmware.api [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788623, 'name': PowerOffVM_Task, 'duration_secs': 0.198161} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.284057] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1627.284057] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1627.293746] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 9c246982-b215-46c1-9cd3-63907a515086/9c246982-b215-46c1-9cd3-63907a515086.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1627.295253] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90ee39ff-c33d-4049-9256-93968ed0bb00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.297499] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efb9ac11-a31d-4306-9eae-95bbadd2e0de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.319513] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1627.319513] env[62816]: value = "task-1788627" [ 1627.319513] env[62816]: _type = "Task" [ 1627.319513] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.328615] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788627, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.655651] env[62816]: DEBUG oslo_vmware.api [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788625, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.249853} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.656110] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1627.657796] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1627.658118] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1627.661306] env[62816]: INFO nova.compute.manager [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1627.661591] env[62816]: DEBUG oslo.service.loopingcall [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1627.661816] env[62816]: DEBUG nova.compute.manager [-] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1627.661916] env[62816]: DEBUG nova.network.neutron [-] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1627.713663] env[62816]: DEBUG nova.network.neutron [-] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.731204] env[62816]: DEBUG nova.compute.utils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1627.735849] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1627.735849] env[62816]: DEBUG nova.network.neutron [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1627.745394] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89128e92-7dce-4fb4-9073-c91331066328 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.749452] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1627.749722] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1627.749951] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleting the datastore file [datastore1] 9bda24c6-f950-47ff-ad3c-ff745291870c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1627.750701] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0fa8a03-c342-4dc6-9109-4ee9d01a732f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.762019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d305e85-eedf-4560-be21-e853905693d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.774508] env[62816]: DEBUG oslo_vmware.api [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1627.774508] env[62816]: value = "task-1788628" [ 1627.774508] env[62816]: _type = "Task" [ 1627.774508] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.784178] env[62816]: DEBUG oslo_vmware.api [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788628, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.808290] env[62816]: DEBUG nova.compute.manager [req-c06bcfe4-79fb-46eb-9b6e-a001e481f37e req-5feb47c9-dcf8-46ff-93c2-3c5d49104394 service nova] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Detach interface failed, port_id=1110b9ce-766b-4ab4-b75f-4e0139f78297, reason: Instance 1c3392d3-cfb0-47c6-9366-8c363ad21297 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1627.819223] env[62816]: DEBUG nova.policy [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bec58fde0e046df8cef98d5084a3000', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '23a18ca8db4a46c1a1cd5c3d8ac78970', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1627.832240] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788627, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.195645] env[62816]: DEBUG nova.network.neutron [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Successfully created port: 98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1628.217450] env[62816]: INFO nova.compute.manager [-] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Took 1.54 seconds to deallocate network for instance. [ 1628.234565] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1628.289238] env[62816]: DEBUG oslo_vmware.api [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205788} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.289516] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1628.289707] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1628.289882] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1628.290071] env[62816]: INFO nova.compute.manager [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Took 1.56 seconds to destroy the instance on the hypervisor. [ 1628.290422] env[62816]: DEBUG oslo.service.loopingcall [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.290689] env[62816]: DEBUG nova.compute.manager [-] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1628.290788] env[62816]: DEBUG nova.network.neutron [-] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1628.334164] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788627, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.515629] env[62816]: DEBUG nova.network.neutron [-] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.715106] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04c74a1b-6abd-47d3-803e-fb065aec718a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.723323] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9adeb05-8a73-4b20-8d4e-699bf7a9d4d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.728889] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1628.763085] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b45637c-5c08-4939-90b2-ef75489a547c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.771368] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25b231d-ad0e-46a8-bf47-ea7b429086cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.790019] env[62816]: DEBUG nova.compute.provider_tree [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1628.831913] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788627, 'name': ReconfigVM_Task, 'duration_secs': 1.033436} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.832491] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 9c246982-b215-46c1-9cd3-63907a515086/9c246982-b215-46c1-9cd3-63907a515086.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1628.833318] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1aa0b1e8-4323-444d-8ac8-2e6f041ee4b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.839432] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1628.839432] env[62816]: value = "task-1788629" [ 1628.839432] env[62816]: _type = "Task" [ 1628.839432] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.849750] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788629, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.019131] env[62816]: INFO nova.compute.manager [-] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Took 1.36 seconds to deallocate network for instance. [ 1629.219665] env[62816]: DEBUG nova.network.neutron [-] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.264104] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1629.275194] env[62816]: DEBUG nova.compute.manager [req-c3f3873b-fe51-42aa-8b62-eac6506f304b req-a3b75646-51f0-4f8e-b5d6-62b5db3553bf service nova] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Received event network-vif-deleted-d7d10695-86f5-4fee-b062-9934fa07e003 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1629.275392] env[62816]: DEBUG nova.compute.manager [req-c3f3873b-fe51-42aa-8b62-eac6506f304b req-a3b75646-51f0-4f8e-b5d6-62b5db3553bf service nova] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Received event network-vif-deleted-5b2b9d44-f66e-428f-a75c-6e213ebdb364 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1629.291384] env[62816]: DEBUG nova.scheduler.client.report [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1629.297868] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1629.298261] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1629.298439] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1629.298627] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1629.298778] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1629.298925] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1629.299148] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1629.299331] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1629.299475] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1629.299700] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1629.299901] env[62816]: DEBUG nova.virt.hardware [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1629.301081] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9dbf4e-4a7e-46c8-b1ff-87fdd1e3cabb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.309895] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c7cf2c-5911-4bd2-b0c8-0758b566ebfd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.350554] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788629, 'name': Rename_Task, 'duration_secs': 0.155325} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.350793] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1629.351095] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cba2ff7a-3774-444f-af79-c0060e3de74a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.358025] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1629.358025] env[62816]: value = "task-1788630" [ 1629.358025] env[62816]: _type = "Task" [ 1629.358025] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.364798] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788630, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.525539] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.722496] env[62816]: INFO nova.compute.manager [-] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Took 1.43 seconds to deallocate network for instance. [ 1629.806040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.806668] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1629.809421] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.340s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.809695] env[62816]: DEBUG nova.objects.instance [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lazy-loading 'resources' on Instance uuid 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1629.868238] env[62816]: DEBUG oslo_vmware.api [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788630, 'name': PowerOnVM_Task, 'duration_secs': 0.444917} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.868527] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1629.868731] env[62816]: INFO nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Took 7.72 seconds to spawn the instance on the hypervisor. 
[ 1629.868915] env[62816]: DEBUG nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1629.870367] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6aca4bb-058f-418f-bfde-e2db4d5f2e3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.230303] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.231260] env[62816]: DEBUG nova.network.neutron [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Successfully updated port: 98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.312160] env[62816]: DEBUG nova.compute.utils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1630.315852] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1630.316024] env[62816]: DEBUG nova.network.neutron [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1630.379473] env[62816]: DEBUG nova.policy [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a3267ab64e4640bf00a0e5dbaaf044', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d830983a3c14168b8f0b67478f27589', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1630.395571] env[62816]: INFO nova.compute.manager [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Took 35.79 seconds to build instance. 
[ 1630.662915] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8134113a-4edc-483f-9028-709ddf9f329c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.671344] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3e7bce-fed6-4d9b-8dad-54ae1ba64023 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.706269] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90c56f6-a5af-4889-a9b5-d8b495aa7598 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.713903] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b999371c-d7cd-47f6-a7ba-d0e40755ae8b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.728064] env[62816]: DEBUG nova.compute.provider_tree [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1630.733798] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "refresh_cache-ecf6469a-c110-4e29-b931-6f9a3b0144dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.734553] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquired lock "refresh_cache-ecf6469a-c110-4e29-b931-6f9a3b0144dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.734553] env[62816]: DEBUG nova.network.neutron [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1630.819369] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1630.897848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-975aa508-aa1e-46b0-9e34-f539e92cad49 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "9c246982-b215-46c1-9cd3-63907a515086" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.746s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.900512] env[62816]: DEBUG nova.network.neutron [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Successfully created port: 3705da00-5613-4084-9e67-b96640678858 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1631.232262] env[62816]: DEBUG nova.scheduler.client.report [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1631.307981] env[62816]: DEBUG nova.network.neutron [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1631.535469] env[62816]: DEBUG nova.compute.manager [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Received event network-vif-plugged-98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.535702] env[62816]: DEBUG oslo_concurrency.lockutils [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] Acquiring lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.535989] env[62816]: DEBUG oslo_concurrency.lockutils [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.536212] env[62816]: DEBUG oslo_concurrency.lockutils [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.536392] env[62816]: DEBUG nova.compute.manager [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] No waiting events found dispatching network-vif-plugged-98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1631.536560] env[62816]: WARNING nova.compute.manager [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Received unexpected event network-vif-plugged-98aca667-93d3-4672-9068-92a6781b5d7b for instance with vm_state building and task_state spawning. [ 1631.536722] env[62816]: DEBUG nova.compute.manager [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Received event network-changed-98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1631.536914] env[62816]: DEBUG nova.compute.manager [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Refreshing instance network info cache due to event network-changed-98aca667-93d3-4672-9068-92a6781b5d7b. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1631.538441] env[62816]: DEBUG oslo_concurrency.lockutils [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] Acquiring lock "refresh_cache-ecf6469a-c110-4e29-b931-6f9a3b0144dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1631.667688] env[62816]: DEBUG nova.network.neutron [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Updating instance_info_cache with network_info: [{"id": "98aca667-93d3-4672-9068-92a6781b5d7b", "address": "fa:16:3e:7a:e3:ca", "network": {"id": "95ad17ca-3a7f-426a-9d96-573788f7e7c8", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1514368251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23a18ca8db4a46c1a1cd5c3d8ac78970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "79c2e589-f55b-4843-8d99-2e565be16706", "external-id": "nsx-vlan-transportzone-858", "segmentation_id": 858, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98aca667-93", "ovs_interfaceid": "98aca667-93d3-4672-9068-92a6781b5d7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.737501] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.928s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.742031] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 27.051s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.742232] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.742392] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1631.742698] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.747s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.742915] env[62816]: DEBUG nova.objects.instance [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lazy-loading 'resources' on Instance uuid d34b7828-542e-4b66-a923-644d0d0f4866 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1631.745475] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfa1ca6-297f-4fa8-8147-f4f5e3ce49e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.761442] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4a07d8-7042-4c60-9915-9598226cadcd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.777395] env[62816]: INFO nova.scheduler.client.report [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Deleted allocations for instance 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf [ 1631.778849] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfd97c8-f052-445d-9f3a-5228babafe31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.788751] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5266f71f-782a-4928-ae10-1caefcfd6d65 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.820726] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178909MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1631.820892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.830500] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1631.864976] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='825db12975b556431b58af72e35db5f7',container_format='bare',created_at=2024-12-12T02:54:00Z,direct_url=,disk_format='vmdk',id=3dbb4887-cb05-4553-b496-bc3e99336442,min_disk=1,min_ram=0,name='tempest-test-snap-1092905997',owner='2d830983a3c14168b8f0b67478f27589',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-12T02:54:17Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1631.865262] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1631.865422] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1631.865606] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1631.865756] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1631.865920] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1631.866162] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1631.866328] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1631.866498] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Got 1 
possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1631.866660] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1631.866835] env[62816]: DEBUG nova.virt.hardware [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1631.867728] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8a6f8b-d5a4-4b3b-81c8-a30a0452241b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.876014] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815b1886-c4c1-4589-9885-8a7834ff0615 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.170355] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Releasing lock "refresh_cache-ecf6469a-c110-4e29-b931-6f9a3b0144dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.170771] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Instance network_info: |[{"id": "98aca667-93d3-4672-9068-92a6781b5d7b", "address": "fa:16:3e:7a:e3:ca", "network": {"id": "95ad17ca-3a7f-426a-9d96-573788f7e7c8", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1514368251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23a18ca8db4a46c1a1cd5c3d8ac78970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "79c2e589-f55b-4843-8d99-2e565be16706", "external-id": "nsx-vlan-transportzone-858", "segmentation_id": 858, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98aca667-93", "ovs_interfaceid": "98aca667-93d3-4672-9068-92a6781b5d7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1632.171155] env[62816]: DEBUG oslo_concurrency.lockutils [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] Acquired lock "refresh_cache-ecf6469a-c110-4e29-b931-6f9a3b0144dc" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.171413] env[62816]: DEBUG nova.network.neutron [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Refreshing network info cache for port 98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1632.173367] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:e3:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '79c2e589-f55b-4843-8d99-2e565be16706', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '98aca667-93d3-4672-9068-92a6781b5d7b', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.183082] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Creating folder: Project (23a18ca8db4a46c1a1cd5c3d8ac78970). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1632.183308] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a289dbb6-7ad0-4e9f-ba17-286a9d03327e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.196449] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Created folder: Project (23a18ca8db4a46c1a1cd5c3d8ac78970) in parent group-v370905. [ 1632.196633] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Creating folder: Instances. Parent ref: group-v371085. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1632.196851] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20fc9d66-3566-496d-9d80-9d073b31f9ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.207694] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Created folder: Instances in parent group-v371085. [ 1632.207948] env[62816]: DEBUG oslo.service.loopingcall [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.208154] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.208391] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebaed680-6b5c-4013-bb95-e478c4fb1e18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.231084] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1632.231084] env[62816]: value = "task-1788633" [ 1632.231084] env[62816]: _type = "Task" [ 1632.231084] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.243556] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788633, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.288776] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b11dc008-a190-494c-8693-026f136641e9 tempest-ServersAdminNegativeTestJSON-1189706527 tempest-ServersAdminNegativeTestJSON-1189706527-project-member] Lock "0707fdd6-2aed-4a09-90e0-c7fb0eae6acf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.638s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.654688] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22014df9-baea-4f19-ae1b-462e36343dc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.665865] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a2e513-6a1b-4b5b-b96b-d584534f184d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.697959] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3614b35-b50b-4117-a77d-ba233e35fceb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.707158] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e0fb23-e685-467f-8640-eaf8e6aed9c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.725821] env[62816]: DEBUG nova.compute.provider_tree [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1632.729628] env[62816]: DEBUG nova.compute.manager [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1632.730610] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b60facd1-5671-4272-97a3-cf943c5af6f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.748022] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788633, 'name': CreateVM_Task, 'duration_secs': 0.337092} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.748235] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1632.748900] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.749076] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.749667] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1632.749667] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbf332d5-45af-4142-9f3c-438a98f14994 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.755122] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1632.755122] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52588701-8f35-6fb8-141b-00e6e8bef4e9" [ 1632.755122] env[62816]: _type = "Task" [ 1632.755122] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.764668] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52588701-8f35-6fb8-141b-00e6e8bef4e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.124185] env[62816]: DEBUG nova.network.neutron [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Updated VIF entry in instance network info cache for port 98aca667-93d3-4672-9068-92a6781b5d7b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.124185] env[62816]: DEBUG nova.network.neutron [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Updating instance_info_cache with network_info: [{"id": "98aca667-93d3-4672-9068-92a6781b5d7b", "address": "fa:16:3e:7a:e3:ca", "network": {"id": "95ad17ca-3a7f-426a-9d96-573788f7e7c8", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1514368251-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "23a18ca8db4a46c1a1cd5c3d8ac78970", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "79c2e589-f55b-4843-8d99-2e565be16706", "external-id": "nsx-vlan-transportzone-858", "segmentation_id": 858, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap98aca667-93", "ovs_interfaceid": "98aca667-93d3-4672-9068-92a6781b5d7b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.218411] env[62816]: DEBUG nova.network.neutron [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Successfully updated port: 3705da00-5613-4084-9e67-b96640678858 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1633.231073] env[62816]: DEBUG nova.scheduler.client.report [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1633.249623] env[62816]: INFO nova.compute.manager [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] instance snapshotting [ 1633.253606] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700adb4d-11b9-4e1b-b951-6f8247f5fefc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.266642] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': 
session[52166549-a417-fee9-199e-38636bfc0ddd]52588701-8f35-6fb8-141b-00e6e8bef4e9, 'name': SearchDatastore_Task, 'duration_secs': 0.010411} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.284671] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.284960] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1633.285231] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.285386] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.285572] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.286377] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23d77120-6af0-4340-8dd0-1c11f4314e27 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.288796] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86eaa440-00a5-44d9-818f-236c07fa8a1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.300120] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1633.300403] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1633.301333] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06e00298-ad2f-4843-a1f1-254670282f0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.307616] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1633.307616] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a2fa87-5569-3087-3a3c-7ad4a65bb91a" [ 1633.307616] env[62816]: _type = "Task" [ 1633.307616] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.317596] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a2fa87-5569-3087-3a3c-7ad4a65bb91a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.627742] env[62816]: DEBUG oslo_concurrency.lockutils [req-3bd914f0-68d3-4c99-a4c6-72d2ba6c4ba5 req-89f4b65b-2f9b-4cd6-bfda-f1343166fad1 service nova] Releasing lock "refresh_cache-ecf6469a-c110-4e29-b931-6f9a3b0144dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.631075] env[62816]: DEBUG nova.compute.manager [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Received event network-vif-plugged-3705da00-5613-4084-9e67-b96640678858 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1633.631427] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] Acquiring lock "c4117422-edd4-49a0-882c-2d8ae39b344d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.632262] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.632538] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.632852] env[62816]: DEBUG nova.compute.manager [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] No waiting events found dispatching network-vif-plugged-3705da00-5613-4084-9e67-b96640678858 
{{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1633.633238] env[62816]: WARNING nova.compute.manager [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Received unexpected event network-vif-plugged-3705da00-5613-4084-9e67-b96640678858 for instance with vm_state building and task_state spawning. [ 1633.633355] env[62816]: DEBUG nova.compute.manager [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Received event network-changed-3705da00-5613-4084-9e67-b96640678858 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1633.633554] env[62816]: DEBUG nova.compute.manager [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Refreshing instance network info cache due to event network-changed-3705da00-5613-4084-9e67-b96640678858. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1633.633853] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] Acquiring lock "refresh_cache-c4117422-edd4-49a0-882c-2d8ae39b344d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.634131] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] Acquired lock "refresh_cache-c4117422-edd4-49a0-882c-2d8ae39b344d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.634646] env[62816]: DEBUG nova.network.neutron [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Refreshing network info cache for port 3705da00-5613-4084-9e67-b96640678858 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1633.721822] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "refresh_cache-c4117422-edd4-49a0-882c-2d8ae39b344d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.725360] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.725360] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.739759] env[62816]: 
DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.997s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.745023] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.996s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.745331] env[62816]: INFO nova.compute.claims [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1633.763087] env[62816]: INFO nova.scheduler.client.report [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Deleted allocations for instance d34b7828-542e-4b66-a923-644d0d0f4866 [ 1633.800514] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1633.800775] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-595a36ed-6541-491f-b50c-e48b8fefa8fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.809526] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1633.809526] env[62816]: value = "task-1788634" [ 1633.809526] env[62816]: _type = "Task" [ 1633.809526] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.832818] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788634, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.833386] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a2fa87-5569-3087-3a3c-7ad4a65bb91a, 'name': SearchDatastore_Task, 'duration_secs': 0.009077} completed successfully. 
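The "compute_resources" lock lines here (acquired after waiting 26.996s, held 1.997s, then released) show the resource tracker serializing claims and usage updates behind one named lock. A small usage sketch of that pattern with oslo.concurrency (assuming oslo.concurrency is installed; the claim/update functions are placeholders, not Nova's resource tracker):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, vcpus, memory_mb):
        # Only one thread at a time may mutate the tracked resource totals, which is
        # why the log shows long "waited" times when many builds are in flight.
        print(f"claiming {vcpus} VCPU / {memory_mb} MB for {instance_uuid}")

    def update_usage(instance_uuid):
        # The same named lock can also be taken explicitly as a context manager.
        with lockutils.lock('compute_resources'):
            print(f"updating usage for {instance_uuid}")

    instance_claim('75165526-2744-40b3-b311-45d13cc48cf1', 1, 192)
    update_usage('d34b7828-542e-4b66-a923-644d0d0f4866')
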
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.834152] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56cbe552-a1b8-4868-b3cf-b6c8c0a835b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.840134] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1633.840134] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52508aff-955d-2286-7a11-35e7c4b9d842" [ 1633.840134] env[62816]: _type = "Task" [ 1633.840134] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.848611] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52508aff-955d-2286-7a11-35e7c4b9d842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.159033] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.159294] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1634.184524] env[62816]: DEBUG nova.network.neutron [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1634.229366] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1634.273340] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e09d7a2-70e9-4470-b154-7f51d414ad89 tempest-InstanceActionsTestJSON-1299200478 tempest-InstanceActionsTestJSON-1299200478-project-member] Lock "d34b7828-542e-4b66-a923-644d0d0f4866" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.779s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.311561] env[62816]: DEBUG nova.network.neutron [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1634.324827] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788634, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.352216] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52508aff-955d-2286-7a11-35e7c4b9d842, 'name': SearchDatastore_Task, 'duration_secs': 0.008723} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.352216] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.352444] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ecf6469a-c110-4e29-b931-6f9a3b0144dc/ecf6469a-c110-4e29-b931-6f9a3b0144dc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.352710] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe6dc401-1394-4ad4-9e69-d471855c45b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.359584] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1634.359584] env[62816]: value = "task-1788635" [ 1634.359584] env[62816]: _type = "Task" [ 1634.359584] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.367711] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788635, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.662228] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1634.752531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.815480] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4f78d79-167d-4d07-9012-f6876ec30e60 req-c3c4b269-147a-47b0-92a4-dd235f7a1a5b service nova] Releasing lock "refresh_cache-c4117422-edd4-49a0-882c-2d8ae39b344d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.817529] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "refresh_cache-c4117422-edd4-49a0-882c-2d8ae39b344d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1634.817529] env[62816]: DEBUG nova.network.neutron [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1634.833281] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788634, 'name': CreateSnapshot_Task, 'duration_secs': 0.713186} completed successfully. 
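The repeated "Task: {...} progress is N%" entries followed by "completed successfully" are oslo.vmware's task-wait loop: wait_for_task polls the vCenter task via _poll_task until it reports success. A stand-alone sketch of that poll-until-done shape, using a toy in-memory task instead of a live session (illustrative only, not the oslo.vmware internals):

    import itertools
    import time

    class ToyTask:
        """Stand-in for a vCenter task: reports progress, then succeeds."""
        def __init__(self, name, steps=4):
            self.name = name
            self._progress = itertools.chain(range(0, 100, 100 // steps),
                                             itertools.repeat(100))

        def poll(self):
            pct = next(self._progress)
            return ("success" if pct >= 100 else "running"), pct

    def wait_for_task(task, poll_interval=0.5):
        """Poll until the task finishes, mirroring the progress lines in the log."""
        while True:
            state, pct = task.poll()
            print(f"Task {task.name!r} progress is {pct}%.")
            if state == "success":
                print(f"Task {task.name!r} completed successfully.")
                return
            time.sleep(poll_interval)

    wait_for_task(ToyTask("CreateSnapshot_Task"), poll_interval=0.01)
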
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.836722] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1634.838020] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c54be4-f5bc-4114-be20-edb942d8de96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.878613] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476261} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.879777] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] ecf6469a-c110-4e29-b931-6f9a3b0144dc/ecf6469a-c110-4e29-b931-6f9a3b0144dc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1634.879777] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1634.879777] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fda2aea3-aa29-4a60-88c4-c41cf3124649 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.886865] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1634.886865] env[62816]: value = "task-1788636" [ 1634.886865] env[62816]: _type = "Task" [ 1634.886865] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.896906] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788636, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.184980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1635.219306] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fc5729-ef5b-4022-ae5a-1ef6e8ba40a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.227021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4dcd02-5b5f-4f80-b29e-01bb4033abf4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.262159] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598e87f5-aca8-43f9-b122-a91b3bc775c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.270541] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90d3e75-a44d-46b8-a61f-265ce11acfce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.287079] env[62816]: DEBUG nova.compute.provider_tree [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.359803] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1635.360085] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a830258b-e96d-47b3-98da-d17d63b76f7f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.364101] env[62816]: DEBUG nova.network.neutron [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1635.372178] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1635.372178] env[62816]: value = "task-1788637" [ 1635.372178] env[62816]: _type = "Task" [ 1635.372178] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.382298] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788637, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.398353] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072003} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.398577] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.399405] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614d6034-d13e-4a3a-b20f-c8a11c5b857d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.423756] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] ecf6469a-c110-4e29-b931-6f9a3b0144dc/ecf6469a-c110-4e29-b931-6f9a3b0144dc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.426592] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42ee93da-6f3d-4d5b-94e4-f24c73e3cbf4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.450933] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1635.450933] env[62816]: value = "task-1788638" [ 1635.450933] env[62816]: _type = "Task" [ 1635.450933] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.460393] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788638, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.791324] env[62816]: DEBUG nova.scheduler.client.report [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1635.846204] env[62816]: DEBUG nova.network.neutron [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Updating instance_info_cache with network_info: [{"id": "3705da00-5613-4084-9e67-b96640678858", "address": "fa:16:3e:d1:13:f0", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3705da00-56", "ovs_interfaceid": "3705da00-5613-4084-9e67-b96640678858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.884347] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788637, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.966646] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788638, 'name': ReconfigVM_Task} progress is 14%. 
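The inventory reported to placement just above (VCPU total 48 with allocation_ratio 4.0, MEMORY_MB total 196590 with 512 reserved, DISK_GB total 400) is what the scheduler sizes claims against: usable capacity per resource class is roughly (total - reserved) * allocation_ratio. A quick worked check of those numbers:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity:g}")

    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

So the m1.nano claim seen earlier (1 VCPU, 192 MB) consumes well under one percent of this node.
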
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.306340] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.306893] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1636.314085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.246s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.314085] env[62816]: DEBUG nova.objects.instance [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1636.350077] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "refresh_cache-c4117422-edd4-49a0-882c-2d8ae39b344d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.350617] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Instance network_info: |[{"id": "3705da00-5613-4084-9e67-b96640678858", "address": "fa:16:3e:d1:13:f0", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3705da00-56", "ovs_interfaceid": "3705da00-5613-4084-9e67-b96640678858", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1636.350905] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:13:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3705da00-5613-4084-9e67-b96640678858', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1636.360247] env[62816]: DEBUG oslo.service.loopingcall [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1636.360247] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1636.360410] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb60fffe-0e83-45b9-b508-d0521557e5c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.394461] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1636.394461] env[62816]: value = "task-1788639" [ 1636.394461] env[62816]: _type = "Task" [ 1636.394461] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.395112] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788637, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.405842] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788639, 'name': CreateVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.468022] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788638, 'name': ReconfigVM_Task, 'duration_secs': 0.793844} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.468022] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Reconfigured VM instance instance-00000040 to attach disk [datastore1] ecf6469a-c110-4e29-b931-6f9a3b0144dc/ecf6469a-c110-4e29-b931-6f9a3b0144dc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.468022] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c80d81a-6a86-4b20-a2f4-f731a1b576ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.474261] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1636.474261] env[62816]: value = "task-1788640" [ 1636.474261] env[62816]: _type = "Task" [ 1636.474261] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.485325] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788640, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.818303] env[62816]: DEBUG nova.compute.utils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.824967] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1636.825090] env[62816]: DEBUG nova.network.neutron [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1636.882391] env[62816]: DEBUG nova.policy [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cb6aaca1d3a43a891afb292444fcf1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'af5e759640e745e0898669640785470f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1636.890511] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788637, 'name': CloneVM_Task, 'duration_secs': 1.369706} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.890511] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Created linked-clone VM from snapshot [ 1636.890822] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa1224d3-51c6-4e18-9642-44eaafe7563c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.899556] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Uploading image 8286f5f3-e792-4aa2-b92b-5847b85da401 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1636.908585] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788639, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.922899] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1636.922899] env[62816]: value = "vm-371089" [ 1636.922899] env[62816]: _type = "VirtualMachine" [ 1636.922899] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1636.923247] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f7226553-d386-42bd-84ed-b55b869e0f22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.930456] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lease: (returnval){ [ 1636.930456] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c12aa-6590-3a13-9f0a-30b915390b50" [ 1636.930456] env[62816]: _type = "HttpNfcLease" [ 1636.930456] env[62816]: } obtained for exporting VM: (result){ [ 1636.930456] env[62816]: value = "vm-371089" [ 1636.930456] env[62816]: _type = "VirtualMachine" [ 1636.930456] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1636.930926] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the lease: (returnval){ [ 1636.930926] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c12aa-6590-3a13-9f0a-30b915390b50" [ 1636.930926] env[62816]: _type = "HttpNfcLease" [ 1636.930926] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1636.937856] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1636.937856] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c12aa-6590-3a13-9f0a-30b915390b50" [ 1636.937856] env[62816]: _type = "HttpNfcLease" [ 1636.937856] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1636.989193] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788640, 'name': Rename_Task, 'duration_secs': 0.140172} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.989490] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1636.989736] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9c3b0fe0-8139-485f-b0b8-44fd984ec5d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.997821] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1636.997821] env[62816]: value = "task-1788642" [ 1636.997821] env[62816]: _type = "Task" [ 1636.997821] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.004683] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.184516] env[62816]: DEBUG nova.network.neutron [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Successfully created port: 6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.331399] env[62816]: DEBUG oslo_concurrency.lockutils [None req-90e2e763-9aa8-4f80-8e34-f96a398a29d6 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.334113] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1637.336684] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.957s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.336943] env[62816]: DEBUG nova.objects.instance [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lazy-loading 'resources' on Instance uuid e003e41d-93e8-4258-b8ca-3c2420b73df0 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1637.409685] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788639, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.439085] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1637.439085] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c12aa-6590-3a13-9f0a-30b915390b50" [ 1637.439085] env[62816]: _type = "HttpNfcLease" [ 1637.439085] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1637.443102] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1637.443102] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c12aa-6590-3a13-9f0a-30b915390b50" [ 1637.443102] env[62816]: _type = "HttpNfcLease" [ 1637.443102] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1637.443102] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e80dcd-b85f-4069-afe3-48c82841ed2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.450452] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5277c189-fa20-503f-6233-a5dfdef46e2a/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1637.450647] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5277c189-fa20-503f-6233-a5dfdef46e2a/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1637.526412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "8ccce660-6c41-412d-99ac-65ca7915d728" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.527497] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "8ccce660-6c41-412d-99ac-65ca7915d728" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.535328] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788642, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.570976] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-715a3f67-f7eb-49da-ab55-33f14c3ceecf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.909595] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788639, 'name': CreateVM_Task, 'duration_secs': 1.361547} completed successfully. 
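The image upload above follows the HttpNfcLease export flow: an ExportVm lease is requested, polled until ready, the disk-0.vmdk URL is read from the lease info, and the handle is opened for reading while HttpNfcLeaseProgress keep-alives are sent as data moves. A rough analogue of that streaming-with-progress step, written against plain file-like objects rather than the real lease API (the callback stands in for the keep-alive call):

    import io

    def stream_export(read_handle, write_handle, total_bytes,
                      report_progress, chunk=64 * 1024):
        """Copy an exported disk in chunks, reporting percent done as it goes."""
        copied = 0
        while True:
            data = read_handle.read(chunk)
            if not data:
                break
            write_handle.write(data)
            copied += len(data)
            report_progress(min(100, copied * 100 // total_bytes))

    src = io.BytesIO(b"\x00" * 300_000)   # stands in for the VMDK URL handle
    dst = io.BytesIO()                    # stands in for the image upload target
    stream_export(src, dst, 300_000, lambda pct: print(f"lease progress {pct}%"))
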
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.909995] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1637.910347] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1637.910526] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.911127] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1637.914030] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab55d76d-51ef-4b93-9fc8-6f740c92604d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.918884] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1637.918884] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527e604b-573b-e863-9a64-d8fb9f6754a7" [ 1637.918884] env[62816]: _type = "Task" [ 1637.918884] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.927653] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527e604b-573b-e863-9a64-d8fb9f6754a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.027259] env[62816]: DEBUG oslo_vmware.api [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788642, 'name': PowerOnVM_Task, 'duration_secs': 0.748937} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.027556] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1638.027759] env[62816]: INFO nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Took 8.76 seconds to spawn the instance on the hypervisor. [ 1638.027941] env[62816]: DEBUG nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1638.028762] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e79d774-b9eb-4cf4-94f5-48f840039839 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.038374] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1638.248737] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e2ced5-ca1d-469e-aac6-c1c2df7037fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.257313] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28bbe3a-260c-4d4c-9428-888eb77f86d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.291504] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92b0fcd-02f5-4f08-b629-bdc36ceeb2e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.299453] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61aa7cd1-4d2a-4a92-90d2-46ac6578bff2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.313715] env[62816]: DEBUG nova.compute.provider_tree [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1638.349519] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Start spawning the instance on 
the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1638.371619] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.371880] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.372056] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1638.372253] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.372398] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.372547] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.372760] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.372918] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1638.373100] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.373268] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.373442] env[62816]: DEBUG nova.virt.hardware [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.374347] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdcf34f-99f5-445e-a001-7099b554df4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.382884] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-926dcea6-1b62-48b5-8d64-7f2702d042fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.429097] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.429529] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Processing image 3dbb4887-cb05-4553-b496-bc3e99336442 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1638.429878] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.430129] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.430397] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1638.430807] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f007b59f-ac08-4dda-8a5f-7f23035de90b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.441211] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1638.441501] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1638.442326] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a92a030-27c5-4b44-a80e-3fc6c1af31ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.447722] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1638.447722] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a9c384-f017-fa9e-2271-e1417efe77c2" [ 1638.447722] env[62816]: _type = "Task" [ 1638.447722] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.455339] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a9c384-f017-fa9e-2271-e1417efe77c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.558644] env[62816]: INFO nova.compute.manager [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Took 38.94 seconds to build instance. 
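
The SearchDatastore_Task entries above follow the usual oslo.vmware calling pattern: Nova submits a vCenter task through the API session and then blocks in wait_for_task, which polls the task (the periodic "progress is 0%" lines) until it reaches a terminal state. A minimal sketch of that pattern, assuming an already-established VMwareAPISession named session and a placeholder datastore-browser reference (neither taken from this log):

from oslo_vmware import api

# A session would normally be built once per driver; values are placeholders.
# session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
#                                api_retry_count=10, task_poll_interval=0.5)

def search_datastore(session, browser_ref, ds_path, search_spec):
    # Submit the vCenter task; invoke_api marshals the SOAP call through
    # the session's vim client (the "Invoking HostDatastoreBrowser.
    # SearchDatastore_Task" lines above).
    task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                              browser_ref,
                              datastorePath=ds_path,
                              searchSpec=search_spec)
    # wait_for_task polls the task state, emitting the "progress is N%"
    # log lines, and returns the task result once it completes
    # successfully (or raises if the task fails).
    return session.wait_for_task(task)
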
[ 1638.572639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.714383] env[62816]: DEBUG nova.compute.manager [req-c1d419a7-a23b-4072-b6c1-483d4b5ce07a req-5c107ae7-22b3-4d95-9705-368e98b4e2ab service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Received event network-vif-plugged-6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1638.714680] env[62816]: DEBUG oslo_concurrency.lockutils [req-c1d419a7-a23b-4072-b6c1-483d4b5ce07a req-5c107ae7-22b3-4d95-9705-368e98b4e2ab service nova] Acquiring lock "75165526-2744-40b3-b311-45d13cc48cf1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.714970] env[62816]: DEBUG oslo_concurrency.lockutils [req-c1d419a7-a23b-4072-b6c1-483d4b5ce07a req-5c107ae7-22b3-4d95-9705-368e98b4e2ab service nova] Lock "75165526-2744-40b3-b311-45d13cc48cf1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.715296] env[62816]: DEBUG oslo_concurrency.lockutils [req-c1d419a7-a23b-4072-b6c1-483d4b5ce07a req-5c107ae7-22b3-4d95-9705-368e98b4e2ab service nova] Lock "75165526-2744-40b3-b311-45d13cc48cf1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.715476] env[62816]: DEBUG nova.compute.manager [req-c1d419a7-a23b-4072-b6c1-483d4b5ce07a req-5c107ae7-22b3-4d95-9705-368e98b4e2ab service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] No waiting events found dispatching network-vif-plugged-6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1638.715759] env[62816]: WARNING nova.compute.manager [req-c1d419a7-a23b-4072-b6c1-483d4b5ce07a req-5c107ae7-22b3-4d95-9705-368e98b4e2ab service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Received unexpected event network-vif-plugged-6c4ae91b-5f2b-45f1-9305-288569be421f for instance with vm_state building and task_state spawning. 
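
The "Acquiring lock ... / acquired ... waited / released ... held" entries come from oslo.concurrency's lockutils, which Nova uses to serialize work such as the resource tracker's instance_claim on the "compute_resources" lock. A rough sketch of that decorator pattern, under the assumption of a placeholder function body rather than Nova's actual claim logic:

from oslo_concurrency import lockutils

# Nova wraps lockutils with a service-specific prefix; the resulting
# decorator logs how long the caller waited for the named lock and how
# long it was held, which is what the lockutils lines above record.
synchronized = lockutils.synchronized_with_prefix('nova-')

@synchronized('compute_resources')
def instance_claim(context, instance, nodename):
    # Placeholder body: runs with the "compute_resources" lock held, so
    # concurrent claims against the same compute node are serialized.
    pass
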
[ 1638.816278] env[62816]: DEBUG nova.network.neutron [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Successfully updated port: 6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.818287] env[62816]: DEBUG nova.scheduler.client.report [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1638.958400] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Preparing fetch location {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1638.958778] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Fetch image to [datastore1] OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3/OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3.vmdk {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1638.958979] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Downloading stream optimized image 3dbb4887-cb05-4553-b496-bc3e99336442 to [datastore1] OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3/OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3.vmdk on the data store datastore1 as vApp {{(pid=62816) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1638.959174] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Downloading image file data 3dbb4887-cb05-4553-b496-bc3e99336442 to the ESX as VM named 'OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3' {{(pid=62816) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1639.035652] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1639.035652] env[62816]: value = "resgroup-9" [ 1639.035652] env[62816]: _type = "ResourcePool" [ 1639.035652] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1639.036596] env[62816]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-48bd2897-9fa5-463e-af1a-d6a7e183393a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.057620] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease: (returnval){ [ 1639.057620] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225faee-38fe-f980-61f9-eba6acc0dcfb" [ 1639.057620] env[62816]: _type = "HttpNfcLease" [ 1639.057620] env[62816]: } obtained for vApp import into resource pool (val){ [ 1639.057620] env[62816]: value = "resgroup-9" [ 1639.057620] env[62816]: _type = "ResourcePool" [ 1639.057620] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1639.057932] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the lease: (returnval){ [ 1639.057932] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225faee-38fe-f980-61f9-eba6acc0dcfb" [ 1639.057932] env[62816]: _type = "HttpNfcLease" [ 1639.057932] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1639.061416] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7a5ab5d8-03fc-469c-8b70-1ae65051104f tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.452s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.065101] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1639.065101] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225faee-38fe-f980-61f9-eba6acc0dcfb" [ 1639.065101] env[62816]: _type = "HttpNfcLease" [ 1639.065101] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1639.168547] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "interface-ecf6469a-c110-4e29-b931-6f9a3b0144dc-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1639.168863] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "interface-ecf6469a-c110-4e29-b931-6f9a3b0144dc-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.169213] env[62816]: DEBUG nova.objects.instance [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lazy-loading 'flavor' on Instance uuid ecf6469a-c110-4e29-b931-6f9a3b0144dc {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1639.323323] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "refresh_cache-75165526-2744-40b3-b311-45d13cc48cf1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.323520] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquired lock "refresh_cache-75165526-2744-40b3-b311-45d13cc48cf1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.323703] env[62816]: DEBUG nova.network.neutron [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.325519] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.328096] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.844s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1639.328364] env[62816]: DEBUG nova.objects.instance [None req-75f43442-5ba5-473c-b8ec-646408e0f872 
tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lazy-loading 'resources' on Instance uuid b788e586-850b-46e7-a204-d80eac56cce7 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1639.351561] env[62816]: INFO nova.scheduler.client.report [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Deleted allocations for instance e003e41d-93e8-4258-b8ca-3c2420b73df0 [ 1639.566855] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1639.566855] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225faee-38fe-f980-61f9-eba6acc0dcfb" [ 1639.566855] env[62816]: _type = "HttpNfcLease" [ 1639.566855] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1639.673616] env[62816]: DEBUG nova.objects.instance [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lazy-loading 'pci_requests' on Instance uuid ecf6469a-c110-4e29-b931-6f9a3b0144dc {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1639.859592] env[62816]: DEBUG nova.network.neutron [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1639.861931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f570e78c-f399-416c-9934-716a76bfa14d tempest-ServerRescueTestJSON-376332962 tempest-ServerRescueTestJSON-376332962-project-member] Lock "e003e41d-93e8-4258-b8ca-3c2420b73df0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.487s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.026732] env[62816]: DEBUG nova.network.neutron [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Updating instance_info_cache with network_info: [{"id": "6c4ae91b-5f2b-45f1-9305-288569be421f", "address": "fa:16:3e:85:0b:d9", "network": {"id": "ed810110-e2ef-41d2-9fd7-c8d37680c11c", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1902511284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5e759640e745e0898669640785470f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4ae91b-5f", "ovs_interfaceid": "6c4ae91b-5f2b-45f1-9305-288569be421f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1640.067625] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1640.067625] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225faee-38fe-f980-61f9-eba6acc0dcfb" [ 1640.067625] env[62816]: _type = "HttpNfcLease" [ 1640.067625] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1640.070151] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1640.070151] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5225faee-38fe-f980-61f9-eba6acc0dcfb" [ 1640.070151] env[62816]: _type = "HttpNfcLease" [ 1640.070151] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1640.071101] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b545aa3-ab18-4ee0-8119-445ca7728b31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.079063] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52070ddd-e635-ba43-d8a2-c6c850b37188/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1640.079269] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52070ddd-e635-ba43-d8a2-c6c850b37188/disk-0.vmdk. 
{{(pid=62816) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1640.146469] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f8b76cc6-94f2-423f-94cf-877173578ede {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.176452] env[62816]: DEBUG nova.objects.base [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1640.176769] env[62816]: DEBUG nova.network.neutron [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1640.227717] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791debeb-01ef-45bf-8018-55333a8eeafe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.235464] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e79dca-43a8-4698-abac-692226491c78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.270285] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c67e9302-8a9f-4a23-961a-4670abf8c373 tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "interface-ecf6469a-c110-4e29-b931-6f9a3b0144dc-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.101s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.272144] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c98204-50dc-4b33-a141-f0968f349694 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.280804] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99538235-ec86-4d08-85ed-06c8151f1b8d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.296922] env[62816]: DEBUG nova.compute.provider_tree [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.529826] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Releasing lock "refresh_cache-75165526-2744-40b3-b311-45d13cc48cf1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.530374] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 
tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Instance network_info: |[{"id": "6c4ae91b-5f2b-45f1-9305-288569be421f", "address": "fa:16:3e:85:0b:d9", "network": {"id": "ed810110-e2ef-41d2-9fd7-c8d37680c11c", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1902511284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5e759640e745e0898669640785470f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4ae91b-5f", "ovs_interfaceid": "6c4ae91b-5f2b-45f1-9305-288569be421f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1640.531063] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:0b:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccf76700-491b-4462-ab19-e6d3a9ff87ac', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c4ae91b-5f2b-45f1-9305-288569be421f', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.540532] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Creating folder: Project (af5e759640e745e0898669640785470f). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.542948] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c09ad204-3f87-4484-a783-c9e2428e0c62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.554344] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Created folder: Project (af5e759640e745e0898669640785470f) in parent group-v370905. [ 1640.554576] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Creating folder: Instances. Parent ref: group-v371092. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.554825] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56c9bc68-1546-45db-9b48-8fb80a7eb2b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.566864] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Created folder: Instances in parent group-v371092. [ 1640.567179] env[62816]: DEBUG oslo.service.loopingcall [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.567394] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.567612] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e6a693e-29cc-4255-b35c-98caf27a449f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.591734] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.591734] env[62816]: value = "task-1788646" [ 1640.591734] env[62816]: _type = "Task" [ 1640.591734] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.601882] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788646, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.800019] env[62816]: DEBUG nova.scheduler.client.report [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1641.014229] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Completed reading data from the image iterator. {{(pid=62816) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1641.014496] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52070ddd-e635-ba43-d8a2-c6c850b37188/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1641.015439] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97dc108-9a1d-4924-ad22-fd81bbf0fdc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.023886] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52070ddd-e635-ba43-d8a2-c6c850b37188/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1641.023886] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52070ddd-e635-ba43-d8a2-c6c850b37188/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1641.023886] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-006ff16e-646a-45c5-8261-ae668df70a53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.103964] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788646, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.221316] env[62816]: DEBUG oslo_vmware.rw_handles [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52070ddd-e635-ba43-d8a2-c6c850b37188/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1641.221567] env[62816]: INFO nova.virt.vmwareapi.images [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Downloaded image file data 3dbb4887-cb05-4553-b496-bc3e99336442 [ 1641.222418] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461275ca-6247-460e-b919-90b0ca39a9b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.238090] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93c7db17-0bd0-45c8-be59-4a41fd1cb87c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.271664] env[62816]: INFO nova.virt.vmwareapi.images [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] The imported VM was unregistered [ 1641.273445] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Caching image {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1641.273684] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating directory with path [datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.274020] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b593468e-fade-45d1-a352-f80cea59faa4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.295328] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created directory with path [datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.295553] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3/OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3.vmdk to [datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk. 
{{(pid=62816) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1641.295824] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-3faf9653-2022-4483-911c-c58dc49c6b86 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.305967] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.978s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.308030] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1641.308030] env[62816]: value = "task-1788648" [ 1641.308030] env[62816]: _type = "Task" [ 1641.308030] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.308510] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.985s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.308758] env[62816]: DEBUG nova.objects.instance [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lazy-loading 'resources' on Instance uuid ede88298-0eae-4471-b602-c26b5fa7a72a {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1641.321017] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.335106] env[62816]: INFO nova.scheduler.client.report [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Deleted allocations for instance b788e586-850b-46e7-a204-d80eac56cce7 [ 1641.408888] env[62816]: DEBUG nova.compute.manager [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Received event network-changed-6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1641.408888] env[62816]: DEBUG nova.compute.manager [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Refreshing instance network info cache due to event network-changed-6c4ae91b-5f2b-45f1-9305-288569be421f. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1641.408888] env[62816]: DEBUG oslo_concurrency.lockutils [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] Acquiring lock "refresh_cache-75165526-2744-40b3-b311-45d13cc48cf1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.409339] env[62816]: DEBUG oslo_concurrency.lockutils [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] Acquired lock "refresh_cache-75165526-2744-40b3-b311-45d13cc48cf1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.409339] env[62816]: DEBUG nova.network.neutron [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Refreshing network info cache for port 6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.605849] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788646, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.823572] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.843883] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75f43442-5ba5-473c-b8ec-646408e0f872 tempest-ServerPasswordTestJSON-2012136199 tempest-ServerPasswordTestJSON-2012136199-project-member] Lock "b788e586-850b-46e7-a204-d80eac56cce7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.979s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.112459] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788646, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.248134] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcb50f5-4634-488f-9ad5-fc88f70a17c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.252462] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.253059] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.253234] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.253464] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1642.253639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1642.259048] env[62816]: INFO nova.compute.manager [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Terminating instance [ 1642.261925] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6af9459-b1f4-4083-b2fc-5e47430f8bb6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.271188] env[62816]: DEBUG nova.compute.manager [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Start destroying the instance on the 
hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1642.271188] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1642.271188] env[62816]: DEBUG nova.network.neutron [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Updated VIF entry in instance network info cache for port 6c4ae91b-5f2b-45f1-9305-288569be421f. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1642.271188] env[62816]: DEBUG nova.network.neutron [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Updating instance_info_cache with network_info: [{"id": "6c4ae91b-5f2b-45f1-9305-288569be421f", "address": "fa:16:3e:85:0b:d9", "network": {"id": "ed810110-e2ef-41d2-9fd7-c8d37680c11c", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1902511284-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "af5e759640e745e0898669640785470f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccf76700-491b-4462-ab19-e6d3a9ff87ac", "external-id": "nsx-vlan-transportzone-956", "segmentation_id": 956, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c4ae91b-5f", "ovs_interfaceid": "6c4ae91b-5f2b-45f1-9305-288569be421f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.271188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e84ffed-2b9a-4b64-a7e3-e70796f7d731 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.279501] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1642.307520] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5613de17-cf32-4c59-a4d4-3f86901f552e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.310994] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdc5155-fb59-4323-a50e-77366ea7e8ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1642.332939] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffbca14f-9430-443c-ad81-a0123e93ad3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.337472] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.337853] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1642.337853] env[62816]: value = "task-1788649" [ 1642.337853] env[62816]: _type = "Task" [ 1642.337853] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.352346] env[62816]: DEBUG nova.compute.provider_tree [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1642.360434] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788649, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.607609] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788646, 'name': CreateVM_Task, 'duration_secs': 1.539478} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.608091] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1642.608577] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1642.608808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.609153] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1642.609451] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee7894c3-bd22-45f0-8c8b-148babe877e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.615016] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1642.615016] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e89529-7620-1f84-0252-0211a08d6382" [ 1642.615016] env[62816]: _type = "Task" [ 1642.615016] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.623747] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e89529-7620-1f84-0252-0211a08d6382, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.774954] env[62816]: DEBUG oslo_concurrency.lockutils [req-27723100-be0b-418b-8e78-8a096d1882c8 req-1d306fab-5e64-42fc-ad2b-c69627fbfe05 service nova] Releasing lock "refresh_cache-75165526-2744-40b3-b311-45d13cc48cf1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.824330] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.848105] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788649, 'name': PowerOffVM_Task, 'duration_secs': 0.259374} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.848404] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1642.848609] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1642.848877] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76df13ad-fefd-4ee2-9a53-f92f329de697 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.859952] env[62816]: DEBUG nova.scheduler.client.report [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1643.029604] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1643.029893] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1643.030259] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Deleting the datastore file [datastore1] ecf6469a-c110-4e29-b931-6f9a3b0144dc {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1643.030561] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-6e167ea7-2875-4acb-a7be-35b74046892c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.039777] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for the task: (returnval){ [ 1643.039777] env[62816]: value = "task-1788651" [ 1643.039777] env[62816]: _type = "Task" [ 1643.039777] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.050088] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.126799] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e89529-7620-1f84-0252-0211a08d6382, 'name': SearchDatastore_Task, 'duration_secs': 0.07695} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.126799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.126874] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1643.127101] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1643.127249] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1643.127513] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1643.127808] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cdce3b1b-0111-40f4-a110-50c86724022b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.151323] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1643.151584] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1643.152361] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6877598-03f7-4026-9101-aeb08030b8cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.167037] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1643.167037] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525bc3d7-2973-8e9f-0965-a98d1c4cc603" [ 1643.167037] env[62816]: _type = "Task" [ 1643.167037] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.173431] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525bc3d7-2973-8e9f-0965-a98d1c4cc603, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.330819] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.369589] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.370056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.702s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.370302] env[62816]: DEBUG nova.objects.instance [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lazy-loading 'resources' on Instance uuid a60d4ff0-af76-4489-840b-ff7f6c23b2ab {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1643.394359] env[62816]: INFO nova.scheduler.client.report [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Deleted allocations for instance ede88298-0eae-4471-b602-c26b5fa7a72a [ 1643.554760] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.678215] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525bc3d7-2973-8e9f-0965-a98d1c4cc603, 'name': SearchDatastore_Task, 'duration_secs': 0.096087} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.679312] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-992c3fa2-d024-40f4-816f-f2d78f84ef05 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.687120] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1643.687120] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52305fbe-b2f2-78b5-1a0e-cc955803083a" [ 1643.687120] env[62816]: _type = "Task" [ 1643.687120] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.696583] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52305fbe-b2f2-78b5-1a0e-cc955803083a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.833199] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.906777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5f2d6c-7d81-4e3b-8ab5-d7c54ecf3aff tempest-ServersNegativeTestMultiTenantJSON-261119415 tempest-ServersNegativeTestMultiTenantJSON-261119415-project-member] Lock "ede88298-0eae-4471-b602-c26b5fa7a72a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.592s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1644.051378] env[62816]: DEBUG oslo_vmware.api [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Task: {'id': task-1788651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.91449} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.056701] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.056701] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1644.056701] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1644.056701] env[62816]: INFO nova.compute.manager [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Took 1.79 seconds to destroy the instance on the hypervisor. [ 1644.057016] env[62816]: DEBUG oslo.service.loopingcall [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1644.057457] env[62816]: DEBUG nova.compute.manager [-] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1644.057564] env[62816]: DEBUG nova.network.neutron [-] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1644.203918] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52305fbe-b2f2-78b5-1a0e-cc955803083a, 'name': SearchDatastore_Task, 'duration_secs': 0.073922} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.204404] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.204763] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 75165526-2744-40b3-b311-45d13cc48cf1/75165526-2744-40b3-b311-45d13cc48cf1.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1644.205058] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1b76c46c-343e-4a96-a7a3-b07ac1553bb7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.218915] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1644.218915] env[62816]: value = "task-1788652" [ 1644.218915] env[62816]: _type = "Task" [ 1644.218915] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.231444] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.330309] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788648, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.623482} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.330828] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3/OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3.vmdk to [datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk. [ 1644.330899] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Cleaning up location [datastore1] OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1644.331156] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_71daaee8-c5e6-40c8-b217-f434aa06ddf3 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1644.331489] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abc07ff6-7e89-488b-9386-0bc3ae267be2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.341970] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1644.341970] env[62816]: value = "task-1788653" [ 1644.341970] env[62816]: _type = "Task" [ 1644.341970] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.353230] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788653, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.355411] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271bbf41-54d2-4161-a8e2-a2d12cf32d3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.366981] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e32265-699a-46f1-a5b8-f5eafd027568 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.403549] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cca4e0-f45b-496d-8562-f74289f3305c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.412472] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8cef00-0937-4741-add9-ad6f5b9ab366 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.428022] env[62816]: DEBUG nova.compute.provider_tree [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1644.606481] env[62816]: DEBUG nova.compute.manager [req-429b75b7-302f-4ac0-9466-bdafcdea7c36 req-35331fe7-5f9f-470e-9f5f-4593765bca91 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Received event network-vif-deleted-98aca667-93d3-4672-9068-92a6781b5d7b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1644.606634] env[62816]: INFO nova.compute.manager [req-429b75b7-302f-4ac0-9466-bdafcdea7c36 req-35331fe7-5f9f-470e-9f5f-4593765bca91 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Neutron deleted interface 98aca667-93d3-4672-9068-92a6781b5d7b; detaching it from the instance and deleting it from the info cache [ 1644.606812] env[62816]: DEBUG nova.network.neutron [req-429b75b7-302f-4ac0-9466-bdafcdea7c36 req-35331fe7-5f9f-470e-9f5f-4593765bca91 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.730073] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788652, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475198} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.730865] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 75165526-2744-40b3-b311-45d13cc48cf1/75165526-2744-40b3-b311-45d13cc48cf1.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1644.731310] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1644.731755] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0be967ac-8ea1-4bc4-9424-94b73ea26e0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.740228] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1644.740228] env[62816]: value = "task-1788654" [ 1644.740228] env[62816]: _type = "Task" [ 1644.740228] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.753344] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.853699] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788653, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.049822} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.854030] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.854242] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.854520] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk to [datastore1] c4117422-edd4-49a0-882c-2d8ae39b344d/c4117422-edd4-49a0-882c-2d8ae39b344d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1644.854865] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4a4c9f1-b94a-4ac2-9402-a82deaa6b8bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.861326] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1644.861326] env[62816]: value = "task-1788655" [ 1644.861326] env[62816]: _type = "Task" [ 1644.861326] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.874248] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788655, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.932023] env[62816]: DEBUG nova.scheduler.client.report [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1644.965848] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5277c189-fa20-503f-6233-a5dfdef46e2a/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1644.966728] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcdc6c3d-c91f-435a-a1b4-512e03becf89 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.973926] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5277c189-fa20-503f-6233-a5dfdef46e2a/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1644.974069] env[62816]: ERROR oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5277c189-fa20-503f-6233-a5dfdef46e2a/disk-0.vmdk due to incomplete transfer. [ 1644.974339] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a42f24fd-8419-4ab5-bff2-e35f31a5b674 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.983149] env[62816]: DEBUG oslo_vmware.rw_handles [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5277c189-fa20-503f-6233-a5dfdef46e2a/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1644.983377] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Uploaded image 8286f5f3-e792-4aa2-b92b-5847b85da401 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1644.986106] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1644.986929] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9f58d1b9-8d99-4db7-a6f2-6e156adc20cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.993160] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1644.993160] env[62816]: value = "task-1788656" [ 1644.993160] env[62816]: _type = "Task" [ 1644.993160] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.002850] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788656, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.058289] env[62816]: DEBUG nova.network.neutron [-] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.110574] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a32d558-7068-42b2-be79-010d8cad5b23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.123992] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f836d44a-2c9b-40c8-a61c-a41e4501f29d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.164497] env[62816]: DEBUG nova.compute.manager [req-429b75b7-302f-4ac0-9466-bdafcdea7c36 req-35331fe7-5f9f-470e-9f5f-4593765bca91 service nova] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Detach interface failed, port_id=98aca667-93d3-4672-9068-92a6781b5d7b, reason: Instance ecf6469a-c110-4e29-b931-6f9a3b0144dc could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1645.250934] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070042} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.251184] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1645.252158] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cd7bad-4622-4831-a126-2502b72d874f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.276634] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 75165526-2744-40b3-b311-45d13cc48cf1/75165526-2744-40b3-b311-45d13cc48cf1.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1645.276842] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cdf33407-88d8-4976-bb8c-c4cddf4da315 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.296526] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1645.296526] env[62816]: value = "task-1788657" [ 1645.296526] env[62816]: _type = "Task" [ 1645.296526] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.305497] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.373497] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788655, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.437928] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.440992] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.946s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.441364] env[62816]: DEBUG nova.objects.instance [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1645.467230] env[62816]: INFO nova.scheduler.client.report [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Deleted allocations for instance a60d4ff0-af76-4489-840b-ff7f6c23b2ab [ 1645.507081] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788656, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.560989] env[62816]: INFO nova.compute.manager [-] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Took 1.50 seconds to deallocate network for instance. [ 1645.810881] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788657, 'name': ReconfigVM_Task, 'duration_secs': 0.339228} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.811202] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 75165526-2744-40b3-b311-45d13cc48cf1/75165526-2744-40b3-b311-45d13cc48cf1.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1645.811867] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25e29b87-e7d1-45fe-bea1-7efb01475257 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.819717] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1645.819717] env[62816]: value = "task-1788658" [ 1645.819717] env[62816]: _type = "Task" [ 1645.819717] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.828116] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788658, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.874911] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788655, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.975651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f05199bc-0073-4962-a9ce-b51bfe3ebf9b tempest-ImagesOneServerNegativeTestJSON-2103110331 tempest-ImagesOneServerNegativeTestJSON-2103110331-project-member] Lock "a60d4ff0-af76-4489-840b-ff7f6c23b2ab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.256s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.008411] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788656, 'name': Destroy_Task, 'duration_secs': 0.576149} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.008532] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Destroyed the VM [ 1646.008996] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1646.009097] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3d63638b-78ce-4719-930e-c800fc80a6a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.015698] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1646.015698] env[62816]: value = "task-1788659" [ 1646.015698] env[62816]: _type = "Task" [ 1646.015698] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.028674] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788659, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.074250] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.335232] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788658, 'name': Rename_Task, 'duration_secs': 0.163318} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.335232] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1646.335379] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ee954d4-a4fa-4a7f-b8cc-50535734b381 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.343743] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1646.343743] env[62816]: value = "task-1788660" [ 1646.343743] env[62816]: _type = "Task" [ 1646.343743] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.356028] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788660, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.377901] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788655, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.452674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e618d20-949b-4d18-ad38-babc666dcb55 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.455034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.279s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.455034] env[62816]: DEBUG nova.objects.instance [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lazy-loading 'pci_requests' on Instance uuid 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1646.525273] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788659, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.862155] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788660, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.875936] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788655, 'name': CopyVirtualDisk_Task} progress is 85%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.959872] env[62816]: DEBUG nova.objects.instance [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lazy-loading 'numa_topology' on Instance uuid 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.031329] env[62816]: DEBUG oslo_vmware.api [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788659, 'name': RemoveSnapshot_Task, 'duration_secs': 0.794477} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.031329] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1647.031329] env[62816]: INFO nova.compute.manager [None req-31bbf35a-3ec1-4469-9c05-ac5d65b594f5 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Took 13.78 seconds to snapshot the instance on the hypervisor. [ 1647.365154] env[62816]: DEBUG oslo_vmware.api [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788660, 'name': PowerOnVM_Task, 'duration_secs': 0.548885} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.365528] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1647.365781] env[62816]: INFO nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Took 9.02 seconds to spawn the instance on the hypervisor. 
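The recurring shape in the entries above is: invoke a vSphere *_Task method through the oslo.vmware session, receive a Task moref immediately, then poll it until it reports success, which is what the "Waiting for the task ... to complete", "progress is N%", and "completed successfully" lines from oslo_vmware.api reflect. A minimal illustrative sketch of that pattern follows; it is not the Nova driver's actual code, the host, credentials, and polling values are placeholders, and only the instance UUID is taken from the log above.

from oslo_vmware import api as vmware_api

# Placeholder connection details; task_poll_interval is the cadence behind
# the _poll_task progress lines seen in the log.
session = vmware_api.VMwareAPISession(
    'vc1.example.test',
    'administrator@vsphere.local',
    'password',
    api_retry_count=10,
    task_poll_interval=0.5)

# Look the VM up by instance UUID (SearchIndex.FindAllByUuid, as in the log).
vms = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='75165526-2744-40b3-b311-45d13cc48cf1',
    vmSearch=True, instanceUuid=True)

if vms:
    vm_ref = vms[0]
    # PowerOnVM_Task returns a Task moref without blocking; wait_for_task
    # polls Task.info, logs progress, and raises if the task ends in error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

The same invoke-then-wait_for_task sequence accounts for every task type in this trace (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, DeleteDatastoreFile_Task, RemoveSnapshot_Task); only the invoked method and its arguments differ.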
[ 1647.366020] env[62816]: DEBUG nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1647.367120] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe5ce19-3b4b-4349-97e6-280676b1d391 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.389645] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788655, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.351614} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.390220] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/3dbb4887-cb05-4553-b496-bc3e99336442/3dbb4887-cb05-4553-b496-bc3e99336442.vmdk to [datastore1] c4117422-edd4-49a0-882c-2d8ae39b344d/c4117422-edd4-49a0-882c-2d8ae39b344d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1647.391068] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dea6f3-5ba3-4254-92d1-b88b3ab5ca2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.414919] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] c4117422-edd4-49a0-882c-2d8ae39b344d/c4117422-edd4-49a0-882c-2d8ae39b344d.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1647.415293] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cdba55f-8771-446a-b984-fdb850b2766a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.436505] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1647.436505] env[62816]: value = "task-1788661" [ 1647.436505] env[62816]: _type = "Task" [ 1647.436505] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.445152] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788661, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.462313] env[62816]: INFO nova.compute.claims [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1647.902814] env[62816]: INFO nova.compute.manager [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Took 41.17 seconds to build instance. [ 1647.956925] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788661, 'name': ReconfigVM_Task, 'duration_secs': 0.291229} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.956925] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Reconfigured VM instance instance-00000041 to attach disk [datastore1] c4117422-edd4-49a0-882c-2d8ae39b344d/c4117422-edd4-49a0-882c-2d8ae39b344d.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.956925] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1dbe9d38-2f17-42bc-a780-8d2393e17be1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.964568] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1647.964568] env[62816]: value = "task-1788662" [ 1647.964568] env[62816]: _type = "Task" [ 1647.964568] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.979800] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788662, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.404865] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e9f832c-74e8-46d5-9373-a2b4d95f08d6 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "75165526-2744-40b3-b311-45d13cc48cf1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.683s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.480042] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788662, 'name': Rename_Task, 'duration_secs': 0.143135} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.480538] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1648.480859] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd083bac-bbb7-4cc3-9699-5099e5537bbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.488652] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1648.488652] env[62816]: value = "task-1788663" [ 1648.488652] env[62816]: _type = "Task" [ 1648.488652] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.496892] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788663, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.850216] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078a57bc-f4d5-46d9-8240-aa158da55359 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.859438] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba2b403-da8f-4644-8001-769bb16e811f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.898500] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194d5eb2-6b06-426c-bf3f-c94e01842c72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.907369] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dce8707-e059-4068-b172-c884d34ae92a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.922775] env[62816]: DEBUG nova.compute.provider_tree [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.998728] env[62816]: DEBUG oslo_vmware.api [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788663, 'name': PowerOnVM_Task, 'duration_secs': 0.428287} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.999009] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1648.999212] env[62816]: INFO nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Took 17.17 seconds to spawn the instance on the hypervisor. [ 1648.999470] env[62816]: DEBUG nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1649.000182] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d32183-6eaf-4a17-87f9-5dcf0306a3c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.012785] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "75165526-2744-40b3-b311-45d13cc48cf1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.013048] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "75165526-2744-40b3-b311-45d13cc48cf1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.013309] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "75165526-2744-40b3-b311-45d13cc48cf1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.014901] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "75165526-2744-40b3-b311-45d13cc48cf1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.014901] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "75165526-2744-40b3-b311-45d13cc48cf1-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.016918] env[62816]: INFO nova.compute.manager [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Terminating instance [ 1649.022925] env[62816]: DEBUG nova.compute.manager [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1649.023152] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1649.023965] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875a8233-bb89-4310-bc76-e124a351bda9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.032289] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1649.032289] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01b05e58-6b03-422c-b301-36ae6aba66b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.038171] env[62816]: DEBUG oslo_vmware.api [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1649.038171] env[62816]: value = "task-1788664" [ 1649.038171] env[62816]: _type = "Task" [ 1649.038171] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.046302] env[62816]: DEBUG oslo_vmware.api [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788664, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.425728] env[62816]: DEBUG nova.scheduler.client.report [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1649.517930] env[62816]: INFO nova.compute.manager [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Took 46.26 seconds to build instance. [ 1649.548172] env[62816]: DEBUG oslo_vmware.api [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788664, 'name': PowerOffVM_Task, 'duration_secs': 0.188639} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.548482] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1649.548723] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1649.549011] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-727b78ed-cf2f-4c5e-a404-ed19c8be7df7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.818044] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "c4117422-edd4-49a0-882c-2d8ae39b344d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.881373] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1649.881611] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 
tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1649.881798] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Deleting the datastore file [datastore1] 75165526-2744-40b3-b311-45d13cc48cf1 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1649.882085] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-222f8329-e66f-496d-b7ad-cce238259f8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.888718] env[62816]: DEBUG oslo_vmware.api [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for the task: (returnval){ [ 1649.888718] env[62816]: value = "task-1788666" [ 1649.888718] env[62816]: _type = "Task" [ 1649.888718] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.896538] env[62816]: DEBUG oslo_vmware.api [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788666, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.931734] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.478s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.933994] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.205s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.934346] env[62816]: DEBUG nova.objects.instance [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'resources' on Instance uuid 1c3392d3-cfb0-47c6-9366-8c363ad21297 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1649.978663] env[62816]: INFO nova.network.neutron [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating port f2f2e184-1921-455c-b435-44548769245c with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1650.019572] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d24fcff0-5458-4d0d-9941-f601809dca66 tempest-ImagesTestJSON-1533817319 
tempest-ImagesTestJSON-1533817319-project-member] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.773s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.019835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.202s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.020078] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "c4117422-edd4-49a0-882c-2d8ae39b344d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.020290] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.020462] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.022735] env[62816]: INFO nova.compute.manager [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Terminating instance [ 1650.024630] env[62816]: DEBUG nova.compute.manager [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1650.024824] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1650.025652] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd201564-8ad6-4625-a8c8-f92bf10bd8da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.034837] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1650.035587] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f70bc67-f88e-4d1a-9bd9-f6153db5bdb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.041320] env[62816]: DEBUG oslo_vmware.api [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1650.041320] env[62816]: value = "task-1788667" [ 1650.041320] env[62816]: _type = "Task" [ 1650.041320] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.050316] env[62816]: DEBUG oslo_vmware.api [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788667, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.356421] env[62816]: DEBUG nova.compute.manager [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1650.356421] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2f35334-887c-49ed-8200-d1eb3e71f29e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.398822] env[62816]: DEBUG oslo_vmware.api [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Task: {'id': task-1788666, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.311682} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.399362] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1650.399362] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1650.399539] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1650.399889] env[62816]: INFO nova.compute.manager [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Took 1.38 seconds to destroy the instance on the hypervisor. [ 1650.399978] env[62816]: DEBUG oslo.service.loopingcall [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1650.400825] env[62816]: DEBUG nova.compute.manager [-] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1650.400920] env[62816]: DEBUG nova.network.neutron [-] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1650.565162] env[62816]: DEBUG oslo_vmware.api [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788667, 'name': PowerOffVM_Task, 'duration_secs': 0.192915} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.565162] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1650.565162] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1650.565162] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a7f870e-c59e-4184-9702-bba819bc1eda {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.642370] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1650.642370] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1650.642569] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleting the datastore file [datastore1] c4117422-edd4-49a0-882c-2d8ae39b344d {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1650.642677] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-faf286a9-ea63-4776-91a1-bf068563bbfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.649823] env[62816]: DEBUG oslo_vmware.api [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1650.649823] env[62816]: value = "task-1788669" [ 1650.649823] env[62816]: _type = "Task" [ 1650.649823] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.661019] env[62816]: DEBUG oslo_vmware.api [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788669, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.719479] env[62816]: DEBUG nova.compute.manager [req-c94fd783-1a45-41c5-a378-133e52166639 req-a329d548-b2aa-429a-9980-49efbdd2e0d8 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Received event network-vif-deleted-6c4ae91b-5f2b-45f1-9305-288569be421f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1650.719687] env[62816]: INFO nova.compute.manager [req-c94fd783-1a45-41c5-a378-133e52166639 req-a329d548-b2aa-429a-9980-49efbdd2e0d8 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Neutron deleted interface 6c4ae91b-5f2b-45f1-9305-288569be421f; detaching it from the instance and deleting it from the info cache [ 1650.719861] env[62816]: DEBUG nova.network.neutron [req-c94fd783-1a45-41c5-a378-133e52166639 req-a329d548-b2aa-429a-9980-49efbdd2e0d8 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.805809] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee0035f-b6fa-43d4-b582-815c83b2fbdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.813371] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad07c225-9c61-44e4-8e16-41d2df250646 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.845108] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a569c1e8-96ed-4fb6-8c94-a0cefe6f93e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.856451] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc092815-ec5f-4d80-af8d-05c75a48cc2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.869070] env[62816]: INFO nova.compute.manager [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] instance snapshotting [ 1650.871276] env[62816]: DEBUG nova.compute.provider_tree [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1650.876602] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d675af00-4005-41d0-a4bf-085fc444ec19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.903054] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29b9d0d6-d52d-4a80-81ab-4c30970c8b76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.159509] env[62816]: DEBUG oslo_vmware.api [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 
tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788669, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13202} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.159509] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1651.159836] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1651.159836] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1651.160356] env[62816]: INFO nova.compute.manager [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1651.160356] env[62816]: DEBUG oslo.service.loopingcall [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1651.160540] env[62816]: DEBUG nova.compute.manager [-] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1651.160540] env[62816]: DEBUG nova.network.neutron [-] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1651.191103] env[62816]: DEBUG nova.network.neutron [-] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.223581] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-87f089d3-09b4-41f1-bc3b-3ea6a73fdb59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.233512] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6e2d4e-fd6c-4ff3-8e5b-0a98f2fcb14a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.265048] env[62816]: DEBUG nova.compute.manager [req-c94fd783-1a45-41c5-a378-133e52166639 req-a329d548-b2aa-429a-9980-49efbdd2e0d8 service nova] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Detach interface failed, port_id=6c4ae91b-5f2b-45f1-9305-288569be421f, reason: Instance 75165526-2744-40b3-b311-45d13cc48cf1 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1651.385502] env[62816]: DEBUG nova.scheduler.client.report [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.414018] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1651.414349] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-18e63ea7-de9f-4d07-9602-715b2ed5d0b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.425673] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1651.425673] env[62816]: value = "task-1788670" [ 1651.425673] env[62816]: _type = "Task" [ 1651.425673] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.433758] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788670, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.627667] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.627667] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.627667] env[62816]: DEBUG nova.network.neutron [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1651.696593] env[62816]: INFO nova.compute.manager [-] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Took 1.30 seconds to deallocate network for instance. [ 1651.893894] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.960s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.896804] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.371s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.897075] env[62816]: DEBUG nova.objects.instance [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lazy-loading 'resources' on Instance uuid 65e97c6a-5d8f-4241-9095-65a5a6132a69 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1651.919270] env[62816]: INFO nova.scheduler.client.report [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Deleted allocations for instance 1c3392d3-cfb0-47c6-9366-8c363ad21297 [ 1651.937150] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788670, 'name': CreateSnapshot_Task, 'duration_secs': 0.475906} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.938297] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1651.938734] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893ab45f-b792-4fd7-88ac-4f5e3ef969af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.952313] env[62816]: DEBUG nova.network.neutron [-] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.207267] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.380837] env[62816]: DEBUG nova.network.neutron [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1652.430205] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6db3ff8d-758a-49ec-a784-679b9d7c62a8 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "1c3392d3-cfb0-47c6-9366-8c363ad21297" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.165s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.459854] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None 
req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1652.460823] env[62816]: INFO nova.compute.manager [-] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Took 1.30 seconds to deallocate network for instance. [ 1652.461082] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-ca4174b6-f008-417e-8c2e-453b0d04b083 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.471245] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1652.471245] env[62816]: value = "task-1788671" [ 1652.471245] env[62816]: _type = "Task" [ 1652.471245] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.484049] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788671, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.706994] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99c6e50e-1ce8-40af-9acc-3031af58b75e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.715489] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc27cbf-3bd2-4e46-9276-2afba2be0f97 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.747880] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ab1e7d-bf0d-4534-ab01-adf8e80ebb72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.756215] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aab8606-5b05-4687-a160-826fda08f0a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.773517] env[62816]: DEBUG nova.compute.provider_tree [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.805614] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.805879] env[62816]: DEBUG 
oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.854057] env[62816]: DEBUG nova.compute.manager [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Received event network-vif-deleted-3705da00-5613-4084-9e67-b96640678858 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1652.854315] env[62816]: DEBUG nova.compute.manager [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-vif-plugged-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1652.854530] env[62816]: DEBUG oslo_concurrency.lockutils [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.854940] env[62816]: DEBUG oslo_concurrency.lockutils [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.855216] env[62816]: DEBUG oslo_concurrency.lockutils [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.855398] env[62816]: DEBUG nova.compute.manager [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] No waiting events found dispatching network-vif-plugged-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1652.855565] env[62816]: WARNING nova.compute.manager [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received unexpected event network-vif-plugged-f2f2e184-1921-455c-b435-44548769245c for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1652.855726] env[62816]: DEBUG nova.compute.manager [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-changed-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1652.855879] env[62816]: DEBUG nova.compute.manager [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Refreshing instance network info cache due to event network-changed-f2f2e184-1921-455c-b435-44548769245c. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1652.856060] env[62816]: DEBUG oslo_concurrency.lockutils [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.883642] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.886131] env[62816]: DEBUG oslo_concurrency.lockutils [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.886388] env[62816]: DEBUG nova.network.neutron [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Refreshing network info cache for port f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1652.913733] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='a7673434ca13d1792e1f6db89f34835c',container_format='bare',created_at=2024-12-12T02:53:55Z,direct_url=,disk_format='vmdk',id=6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-131311340-shelved',owner='005f772e517340a0acaac0d61b8262df',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2024-12-12T02:54:08Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1652.913985] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 1652.914168] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1652.914361] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1652.914510] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1652.914660] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1652.914908] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1652.915160] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1652.915415] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1652.915603] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1652.915828] env[62816]: DEBUG nova.virt.hardware [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1652.917021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e6a899-ad4e-4ff4-9101-2b26273218a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.925741] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d504cad-a1be-40e8-b544-9253aa646557 
{{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.941435] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:90:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '75ff81f9-72b2-4e58-a8d8-5699907f7459', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f2f2e184-1921-455c-b435-44548769245c', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1652.949152] env[62816]: DEBUG oslo.service.loopingcall [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1652.949455] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1652.949677] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1162f63-47f7-4223-961d-e541405b98ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.969402] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.970488] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1652.970488] env[62816]: value = "task-1788672" [ 1652.970488] env[62816]: _type = "Task" [ 1652.970488] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.982632] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788672, 'name': CreateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.986415] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788671, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.280334] env[62816]: DEBUG nova.scheduler.client.report [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1653.308985] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1653.493458] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788672, 'name': CreateVM_Task, 'duration_secs': 0.475994} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.493707] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1653.494465] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.494744] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.495506] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1653.500619] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e1c6889-bd43-4cce-b0f3-55d276035578 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.502499] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788671, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.505531] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1653.505531] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ea1d14-bfe2-8717-5deb-5cb431620f14" [ 1653.505531] env[62816]: _type = "Task" [ 1653.505531] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.518838] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ea1d14-bfe2-8717-5deb-5cb431620f14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.610125] env[62816]: DEBUG nova.network.neutron [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updated VIF entry in instance network info cache for port f2f2e184-1921-455c-b435-44548769245c. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1653.610125] env[62816]: DEBUG nova.network.neutron [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.787784] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.891s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.790495] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 
tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.560s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.790772] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.793712] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 21.973s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.815091] env[62816]: INFO nova.scheduler.client.report [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted allocations for instance 65e97c6a-5d8f-4241-9095-65a5a6132a69 [ 1653.824726] env[62816]: INFO nova.scheduler.client.report [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleted allocations for instance 9bda24c6-f950-47ff-ad3c-ff745291870c [ 1653.838037] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.990567] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788671, 'name': CloneVM_Task, 'duration_secs': 1.326549} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.990728] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Created linked-clone VM from snapshot [ 1653.991650] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0dd67d-12d0-402e-ad4d-eb71572fb902 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.998802] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Uploading image e4dba3ba-b936-41a1-a932-0be8eccc0c73 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1654.014341] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.014570] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Processing image 6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1654.014809] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.014955] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.015150] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1654.015403] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b95ea6a-6c70-4d05-a710-2aeb2cefee34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.023518] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 
tempest-ServersNegativeTestJSON-2121561785-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1654.023518] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1654.024430] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ead4a491-1e20-438c-b65a-f4a7c5001880 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.028379] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1654.028379] env[62816]: value = "vm-371096" [ 1654.028379] env[62816]: _type = "VirtualMachine" [ 1654.028379] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1654.028601] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1de74144-481a-499c-a1ba-5926beab1761 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.032546] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1654.032546] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526b00f9-15fb-a385-7814-b8b563ccd379" [ 1654.032546] env[62816]: _type = "Task" [ 1654.032546] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.036575] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lease: (returnval){ [ 1654.036575] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e02f63-903f-f126-6982-39a285ffd8ed" [ 1654.036575] env[62816]: _type = "HttpNfcLease" [ 1654.036575] env[62816]: } obtained for exporting VM: (result){ [ 1654.036575] env[62816]: value = "vm-371096" [ 1654.036575] env[62816]: _type = "VirtualMachine" [ 1654.036575] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1654.036856] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the lease: (returnval){ [ 1654.036856] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e02f63-903f-f126-6982-39a285ffd8ed" [ 1654.036856] env[62816]: _type = "HttpNfcLease" [ 1654.036856] env[62816]: } to be ready. 
{{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1654.043077] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526b00f9-15fb-a385-7814-b8b563ccd379, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.045606] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1654.045606] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e02f63-903f-f126-6982-39a285ffd8ed" [ 1654.045606] env[62816]: _type = "HttpNfcLease" [ 1654.045606] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1654.113697] env[62816]: DEBUG oslo_concurrency.lockutils [req-49512b32-e5d5-4740-87da-4ccbce49eb21 req-a84c6443-8202-449b-9c22-e7a6d696419b service nova] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.332321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8842678b-f2be-465f-bedd-f6323c40f8a2 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "65e97c6a-5d8f-4241-9095-65a5a6132a69" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.877s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.335625] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3443820c-3169-402e-b336-dfc7d93d4070 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "9bda24c6-f950-47ff-ad3c-ff745291870c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.622s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.543898] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Preparing fetch location {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1654.544194] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Fetch image to [datastore1] OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b/OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b.vmdk {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1654.544405] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Downloading stream optimized image 6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f to [datastore1] OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b/OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b.vmdk on the data store datastore1 as vApp {{(pid=62816) _fetch_image_as_vapp 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1654.544583] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Downloading image file data 6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f to the ESX as VM named 'OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b' {{(pid=62816) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1654.547980] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1654.547980] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e02f63-903f-f126-6982-39a285ffd8ed" [ 1654.547980] env[62816]: _type = "HttpNfcLease" [ 1654.547980] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1654.548403] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1654.548403] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e02f63-903f-f126-6982-39a285ffd8ed" [ 1654.548403] env[62816]: _type = "HttpNfcLease" [ 1654.548403] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1654.548910] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7c3936-c05a-424d-9a8e-53d167b9b77e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.556164] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283bbdf-75eb-1f87-e2d2-d61936f2a7d7/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1654.556339] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283bbdf-75eb-1f87-e2d2-d61936f2a7d7/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1654.644983] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2072b33e-dd3f-4a65-a452-0f2e451dd612 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.673035] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1654.673035] env[62816]: value = "resgroup-9" [ 1654.673035] env[62816]: _type = "ResourcePool" [ 1654.673035] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1654.673624] env[62816]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-a59e477c-ada2-4e96-a274-7f5d57f88f96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.694821] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lease: (returnval){ [ 1654.694821] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ae5d18-153d-4db9-8a5b-6e1224e4d54d" [ 1654.694821] env[62816]: _type = "HttpNfcLease" [ 1654.694821] env[62816]: } obtained for vApp import into resource pool (val){ [ 1654.694821] env[62816]: value = "resgroup-9" [ 1654.694821] env[62816]: _type = "ResourcePool" [ 1654.694821] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1654.695160] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the lease: (returnval){ [ 1654.695160] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ae5d18-153d-4db9-8a5b-6e1224e4d54d" [ 1654.695160] env[62816]: _type = "HttpNfcLease" [ 1654.695160] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1654.701586] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1654.701586] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ae5d18-153d-4db9-8a5b-6e1224e4d54d" [ 1654.701586] env[62816]: _type = "HttpNfcLease" [ 1654.701586] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1654.841250] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.841566] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0e0261fe-4376-487c-9d54-c4f37577409c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.841840] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0a1a8539-940a-4a17-9826-82736be41892 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.842187] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0dbf907f-0313-435c-a8be-19f7e48ded76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.842432] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1056fc6e-af1e-4d63-a9ce-9ade4dd73891 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.842665] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f9d9593a-1c25-47a1-98fd-4462a851f134 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.842888] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance a01e772c-dafe-4091-bae6-f9f59d5c972d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.843133] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 31ac8296-14fa-46f7-b825-c31904b832d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.843356] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.843603] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 8105e650-8482-40c6-bd7a-b8daea19a0d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.843843] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9745413b-2bd8-45d7-8491-483e4921b59c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.844220] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance dd833e38-691c-4757-9c6b-659c74343d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.844533] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9c246982-b215-46c1-9cd3-63907a515086 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.845906] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance ecf6469a-c110-4e29-b931-6f9a3b0144dc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.845906] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 679cd9a3-2ed6-451f-b934-ba7738913959 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1654.845906] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c4117422-edd4-49a0-882c-2d8ae39b344d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1654.845906] env[62816]: WARNING nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 75165526-2744-40b3-b311-45d13cc48cf1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1654.991273] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.991559] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.991774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1654.991968] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.992325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1654.994540] env[62816]: INFO nova.compute.manager [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Terminating instance [ 1654.996623] env[62816]: DEBUG nova.compute.manager [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1654.996920] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1654.997886] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f2a49d-0cd8-4556-a2fa-32341b0851a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.006757] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.007080] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a675dab-6068-4340-8e32-83761bf62ff9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.014048] env[62816]: DEBUG oslo_vmware.api [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1655.014048] env[62816]: value = "task-1788675" [ 1655.014048] env[62816]: _type = "Task" [ 1655.014048] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.022489] env[62816]: DEBUG oslo_vmware.api [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788675, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.203475] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1655.203475] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ae5d18-153d-4db9-8a5b-6e1224e4d54d" [ 1655.203475] env[62816]: _type = "HttpNfcLease" [ 1655.203475] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1655.203861] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1655.203861] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ae5d18-153d-4db9-8a5b-6e1224e4d54d" [ 1655.203861] env[62816]: _type = "HttpNfcLease" [ 1655.203861] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1655.204724] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24aca25d-8633-4044-a410-9a6374793830 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.212964] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527478e3-62db-88b1-3b9a-95f9debcfe7d/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1655.213293] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527478e3-62db-88b1-3b9a-95f9debcfe7d/disk-0.vmdk. {{(pid=62816) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1655.279873] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f78125e6-89c7-4780-8b1e-a010c3213637 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.347967] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c66fa160-d4dd-429f-8751-f36cb2020ff1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1655.390093] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.390366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.527414] env[62816]: DEBUG oslo_vmware.api [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788675, 'name': PowerOffVM_Task, 'duration_secs': 0.230287} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.529673] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1655.529920] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1655.530367] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.530715] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.531089] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.531408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.531781] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.533570] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f33b0389-7570-48da-a72e-b9620f112e97 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.536565] env[62816]: INFO nova.compute.manager [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 
tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Terminating instance [ 1655.542298] env[62816]: DEBUG nova.compute.manager [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1655.542590] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1655.543949] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533a98c0-50a3-41c9-80d8-4bbfe9aff083 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.553820] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1655.555842] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86b6664d-6e96-40a9-9507-8406eee214ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.563908] env[62816]: DEBUG oslo_vmware.api [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1655.563908] env[62816]: value = "task-1788677" [ 1655.563908] env[62816]: _type = "Task" [ 1655.563908] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.577214] env[62816]: DEBUG oslo_vmware.api [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788677, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.634356] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1655.634756] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1655.635504] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleting the datastore file [datastore1] 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1655.635615] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0229dedc-1b44-4bd3-b223-fbd61a9d9a87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.644241] env[62816]: DEBUG oslo_vmware.api [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for the task: (returnval){ [ 1655.644241] env[62816]: value = "task-1788678" [ 1655.644241] env[62816]: _type = "Task" [ 1655.644241] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.655611] env[62816]: DEBUG oslo_vmware.api [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.851443] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 543d69d2-0694-4d57-bbae-f8851ff0f0dc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1655.892606] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1656.076907] env[62816]: DEBUG oslo_vmware.api [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788677, 'name': PowerOffVM_Task, 'duration_secs': 0.216011} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.079646] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.079871] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1656.080239] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cfcce0f-527d-4aff-8801-43f30bfe3cd9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.157482] env[62816]: DEBUG oslo_vmware.api [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Task: {'id': task-1788678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.228815} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.158050] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1656.158614] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1656.158961] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1656.159335] env[62816]: INFO nova.compute.manager [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1656.159817] env[62816]: DEBUG oslo.service.loopingcall [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.163738] env[62816]: DEBUG nova.compute.manager [-] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1656.163964] env[62816]: DEBUG nova.network.neutron [-] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1656.165976] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1656.166150] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1656.166336] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleting the datastore file [datastore1] 1056fc6e-af1e-4d63-a9ce-9ade4dd73891 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1656.166737] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2dc5fdf9-f936-4b26-9177-cb6a578da719 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.174021] env[62816]: DEBUG oslo_vmware.api [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1656.174021] env[62816]: value = "task-1788680" [ 1656.174021] env[62816]: _type = "Task" [ 1656.174021] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.189022] env[62816]: DEBUG oslo_vmware.api [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788680, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.357699] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 8ccce660-6c41-412d-99ac-65ca7915d728 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1656.424136] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.496807] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Completed reading data from the image iterator. {{(pid=62816) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1656.497339] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527478e3-62db-88b1-3b9a-95f9debcfe7d/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1656.498342] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2253217-4042-48e7-aafa-91a3153cc4ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.505065] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527478e3-62db-88b1-3b9a-95f9debcfe7d/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1656.505301] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527478e3-62db-88b1-3b9a-95f9debcfe7d/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1656.505535] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-48cef53e-8c63-405f-84f9-02fef89d209c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.684925] env[62816]: DEBUG oslo_vmware.api [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788680, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167439} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.685340] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1656.685547] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1656.685761] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1656.686009] env[62816]: INFO nova.compute.manager [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1656.686211] env[62816]: DEBUG oslo.service.loopingcall [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.686409] env[62816]: DEBUG nova.compute.manager [-] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1656.686525] env[62816]: DEBUG nova.network.neutron [-] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1656.744066] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527478e3-62db-88b1-3b9a-95f9debcfe7d/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1656.744309] env[62816]: INFO nova.virt.vmwareapi.images [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Downloaded image file data 6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f [ 1656.746583] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab123490-29d2-4a85-b48c-da69da552c8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.768129] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9de52535-2ac6-4971-a689-eed32fd38fa4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.773729] env[62816]: DEBUG nova.compute.manager [req-44379176-efe9-44cc-8b3a-b18df426993f req-a9a9b652-65a3-4e96-8245-8919d1f6db3a service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Received event network-vif-deleted-c37f87b0-7404-4bad-89e7-5ebbccb43aad {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.773934] env[62816]: INFO nova.compute.manager [req-44379176-efe9-44cc-8b3a-b18df426993f req-a9a9b652-65a3-4e96-8245-8919d1f6db3a service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Neutron deleted interface c37f87b0-7404-4bad-89e7-5ebbccb43aad; detaching it from the instance and deleting it from the info cache [ 1656.774127] env[62816]: DEBUG nova.network.neutron [req-44379176-efe9-44cc-8b3a-b18df426993f req-a9a9b652-65a3-4e96-8245-8919d1f6db3a service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.802668] env[62816]: INFO nova.virt.vmwareapi.images [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] The imported VM was unregistered [ 1656.804340] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Caching image {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1656.804603] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Creating directory with path [datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1656.804885] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54e5f338-bd37-4deb-9a96-5b9b7fb94e8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.815640] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Created directory with path [datastore1] 
devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1656.815851] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b/OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b.vmdk to [datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk. {{(pid=62816) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1656.816135] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-a9d2e94c-759e-403a-b995-5ed4ba58d3bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.823868] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1656.823868] env[62816]: value = "task-1788682" [ 1656.823868] env[62816]: _type = "Task" [ 1656.823868] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.835894] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788682, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.860511] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance a5f50ca4-4648-4f33-a6d3-18cfc4fd3441 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1656.860810] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1656.860958] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1657.194222] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0faefc-5e97-4e9c-b9e4-baebd31a5214 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.204533] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5c8ed0-6508-4fa5-9306-9ca985d15aeb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.237946] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fe782e2-a25a-41f0-8ad9-fee6c447471f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.247812] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caec44f4-5ef1-46dd-824e-9a46ecd1ada3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.252060] env[62816]: DEBUG nova.network.neutron [-] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.265521] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.280236] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ff2d14f-0efd-44c2-9e3f-25e4b0403328 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.292149] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bc5179-6306-42d3-8177-0800069db26b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.329856] env[62816]: DEBUG nova.compute.manager [req-44379176-efe9-44cc-8b3a-b18df426993f req-a9a9b652-65a3-4e96-8245-8919d1f6db3a service nova] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Detach interface failed, 
port_id=c37f87b0-7404-4bad-89e7-5ebbccb43aad, reason: Instance 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1657.342722] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788682, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.439659] env[62816]: DEBUG nova.network.neutron [-] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.754597] env[62816]: INFO nova.compute.manager [-] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Took 1.59 seconds to deallocate network for instance. [ 1657.790116] env[62816]: ERROR nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [req-e4a82e74-e830-475a-a35e-f9cc77d7b742] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e4a82e74-e830-475a-a35e-f9cc77d7b742"}]} [ 1657.807989] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1657.824087] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1657.825026] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1657.838199] env[62816]: 
DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1657.844035] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788682, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.862343] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1657.942041] env[62816]: INFO nova.compute.manager [-] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Took 1.26 seconds to deallocate network for instance. [ 1658.169008] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eda8dfa-9da5-4ddc-911e-42e99fb5db90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.179430] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369a2232-ca0e-4b1f-8a9f-99e30b443a6f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.213785] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c028212-0f78-4146-9270-a1be8cef209c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.225796] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c075aac-1411-4189-9edb-2b6f26cbe207 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.243964] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1658.261948] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.343281] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 
tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788682, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.450233] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.778036] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 96 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1658.778036] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 96 to 97 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1658.778036] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1658.800198] env[62816]: DEBUG nova.compute.manager [req-3efec479-503b-4184-a531-722aae9ba2df req-f9ec26f0-72f7-484b-a41a-715376694940 service nova] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Received event network-vif-deleted-e267cdb4-076e-4451-8184-3f42be642a6e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1658.843476] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788682, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.283879] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1659.284206] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.490s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.284631] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.532s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.286312] env[62816]: INFO nova.compute.claims [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1659.341720] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788682, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.238543} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.341933] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b/OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b.vmdk to [datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk. 
[ 1659.342144] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Cleaning up location [datastore1] OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1659.342306] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_96bb0553-fa36-49a4-9c6b-1df9f49f124b {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.342556] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f23b3026-0ef9-49dc-b8ff-fdc1dc81cc00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.348717] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1659.348717] env[62816]: value = "task-1788683" [ 1659.348717] env[62816]: _type = "Task" [ 1659.348717] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.356940] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788683, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.858445] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788683, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034043} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.858783] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1659.858858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.859222] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk to [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1659.859415] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c103c1c-03b8-4e63-99fd-f66e26cf7c47 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.866980] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1659.866980] env[62816]: value = "task-1788684" [ 1659.866980] env[62816]: _type = "Task" [ 1659.866980] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.874828] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788684, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.378474] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788684, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.592876] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b26ef2-faca-461b-bbe0-a3f7fb389203 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.602596] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178d39d7-4618-471e-8e41-ae2814baad99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.636906] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da18d7ce-91de-4745-8f0a-87d521db88ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.646737] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f189133-a2ea-437d-978f-db445ed8d2ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.661582] env[62816]: DEBUG nova.compute.provider_tree [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1660.880691] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788684, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.194044] env[62816]: DEBUG nova.scheduler.client.report [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 97 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1661.194391] env[62816]: DEBUG nova.compute.provider_tree [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 97 to 98 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1661.194614] env[62816]: DEBUG nova.compute.provider_tree [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1661.382308] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788684, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.702786] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.703389] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1661.706308] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.522s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.708029] env[62816]: INFO nova.compute.claims [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1661.883565] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788684, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.024142] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283bbdf-75eb-1f87-e2d2-d61936f2a7d7/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1662.025259] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f572eb81-dca8-46e9-9b29-753d1154e784 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.034138] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283bbdf-75eb-1f87-e2d2-d61936f2a7d7/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1662.034384] env[62816]: ERROR oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283bbdf-75eb-1f87-e2d2-d61936f2a7d7/disk-0.vmdk due to incomplete transfer. [ 1662.034701] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-98f9838f-422e-4597-8dc2-270ae8280b16 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.045177] env[62816]: DEBUG oslo_vmware.rw_handles [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5283bbdf-75eb-1f87-e2d2-d61936f2a7d7/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1662.045392] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Uploaded image e4dba3ba-b936-41a1-a932-0be8eccc0c73 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1662.048038] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1662.048330] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-25d95ffe-fc25-4211-a7c8-63b29062f2fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.057645] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1662.057645] env[62816]: value = "task-1788685" [ 1662.057645] env[62816]: _type = "Task" [ 1662.057645] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.071197] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788685, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.212970] env[62816]: DEBUG nova.compute.utils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1662.218316] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1662.218316] env[62816]: DEBUG nova.network.neutron [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1662.291774] env[62816]: DEBUG nova.policy [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96acb678bd4641f49f24cdc456595705', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12767255c02a4e16ad13383fdb725593', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1662.380121] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788684, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.278139} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.380413] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f/6e7c330d-2b9e-435b-a0f4-e7ae289a3e4f.vmdk to [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1662.381254] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b8bb9d-47b2-4769-9b80-f54b4019c952 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.403762] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1662.404078] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4e78c543-4422-4ea4-83a5-5683c4a0f8ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.431325] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1662.431325] env[62816]: value = "task-1788686" [ 1662.431325] env[62816]: _type = "Task" [ 
1662.431325] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.439371] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788686, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.557941] env[62816]: DEBUG nova.network.neutron [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Successfully created port: 3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1662.568464] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788685, 'name': Destroy_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.718458] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1662.943394] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788686, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.969280] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913b6256-4f8f-49cf-b640-4c6047c49b34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.976429] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1b6323-04ce-402e-b283-f9aea782eb6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.007592] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-badc08b7-82dd-4a85-963a-1aea0bde8198 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.014780] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03f7716-7bc1-47ad-859a-66c34437c46e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.028911] env[62816]: DEBUG nova.compute.provider_tree [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1663.069480] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788685, 'name': Destroy_Task, 'duration_secs': 0.533772} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.069755] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Destroyed the VM [ 1663.070053] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1663.070304] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a0bd29e4-8be1-435c-8346-8e09cffb3d4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.077249] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1663.077249] env[62816]: value = "task-1788687" [ 1663.077249] env[62816]: _type = "Task" [ 1663.077249] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.085049] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788687, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.442074] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788686, 'name': ReconfigVM_Task, 'duration_secs': 1.009013} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.442379] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959/679cd9a3-2ed6-451f-b934-ba7738913959.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1663.443054] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eada954a-2a75-4b41-aad0-441ed55ebf48 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.449840] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1663.449840] env[62816]: value = "task-1788688" [ 1663.449840] env[62816]: _type = "Task" [ 1663.449840] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.458237] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788688, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.533201] env[62816]: DEBUG nova.scheduler.client.report [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1663.588186] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788687, 'name': RemoveSnapshot_Task} progress is 30%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.731292] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1663.762611] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1663.762984] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1663.763238] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1663.763523] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1663.763750] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1663.763979] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1663.764307] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1663.764548] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1663.764801] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1663.765067] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1663.765336] env[62816]: DEBUG nova.virt.hardware [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1663.766535] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f451fbe-e1dc-4a03-b773-dac0c9447124 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.775717] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc19539-825c-4e36-b08b-9a07bd2ca5f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.956354] env[62816]: DEBUG nova.compute.manager [req-616133ac-840d-4b4b-93d2-cf7e197c0c24 req-3d22a530-8446-494e-a31a-2ee444fd9958 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Received event network-vif-plugged-3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1663.956983] env[62816]: DEBUG oslo_concurrency.lockutils [req-616133ac-840d-4b4b-93d2-cf7e197c0c24 req-3d22a530-8446-494e-a31a-2ee444fd9958 service nova] Acquiring lock "c66fa160-d4dd-429f-8751-f36cb2020ff1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.957327] env[62816]: DEBUG oslo_concurrency.lockutils [req-616133ac-840d-4b4b-93d2-cf7e197c0c24 req-3d22a530-8446-494e-a31a-2ee444fd9958 service nova] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.957619] env[62816]: DEBUG oslo_concurrency.lockutils [req-616133ac-840d-4b4b-93d2-cf7e197c0c24 req-3d22a530-8446-494e-a31a-2ee444fd9958 service nova] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 
0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.957906] env[62816]: DEBUG nova.compute.manager [req-616133ac-840d-4b4b-93d2-cf7e197c0c24 req-3d22a530-8446-494e-a31a-2ee444fd9958 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] No waiting events found dispatching network-vif-plugged-3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1663.958198] env[62816]: WARNING nova.compute.manager [req-616133ac-840d-4b4b-93d2-cf7e197c0c24 req-3d22a530-8446-494e-a31a-2ee444fd9958 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Received unexpected event network-vif-plugged-3b5a67df-8153-4be4-8afe-6bd3ae2b807b for instance with vm_state building and task_state spawning. [ 1663.965056] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788688, 'name': Rename_Task, 'duration_secs': 0.133663} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.965574] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1663.965947] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1361d45-ceb1-47c9-bcc7-a612e4c937ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.974014] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1663.974014] env[62816]: value = "task-1788689" [ 1663.974014] env[62816]: _type = "Task" [ 1663.974014] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.981516] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.041021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.332s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.041021] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1664.042237] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.470s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.043806] env[62816]: INFO nova.compute.claims [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1664.058379] env[62816]: DEBUG nova.network.neutron [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Successfully updated port: 3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1664.096663] env[62816]: DEBUG oslo_vmware.api [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788687, 'name': RemoveSnapshot_Task, 'duration_secs': 0.542459} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.096961] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1664.097206] env[62816]: INFO nova.compute.manager [None req-09049ff0-9a68-4ba7-a9e7-0eb2d626d8f9 tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Took 13.22 seconds to snapshot the instance on the hypervisor. [ 1664.483631] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788689, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.548791] env[62816]: DEBUG nova.compute.utils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1664.552032] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1664.552145] env[62816]: DEBUG nova.network.neutron [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1664.561069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.561069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.561069] env[62816]: DEBUG nova.network.neutron [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1664.623064] env[62816]: DEBUG nova.policy [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96acb678bd4641f49f24cdc456595705', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12767255c02a4e16ad13383fdb725593', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1664.858829] env[62816]: DEBUG nova.network.neutron [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Successfully created port: 7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1664.982558] env[62816]: DEBUG oslo_vmware.api [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788689, 'name': PowerOnVM_Task, 'duration_secs': 0.52986} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.982797] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1665.054318] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1665.095120] env[62816]: DEBUG nova.compute.manager [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1665.095120] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c3bd6d-fb29-4aca-bbd2-d871316bfb0b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.111164] env[62816]: DEBUG nova.network.neutron [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1665.269187] env[62816]: DEBUG nova.network.neutron [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updating instance_info_cache with network_info: [{"id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "address": "fa:16:3e:84:b5:d3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5a67df-81", "ovs_interfaceid": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1665.307207] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "9c246982-b215-46c1-9cd3-63907a515086" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.307457] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "9c246982-b215-46c1-9cd3-63907a515086" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.307660] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "9c246982-b215-46c1-9cd3-63907a515086-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.307846] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "9c246982-b215-46c1-9cd3-63907a515086-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.308025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "9c246982-b215-46c1-9cd3-63907a515086-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.310221] env[62816]: INFO nova.compute.manager [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Terminating instance [ 1665.311850] env[62816]: DEBUG nova.compute.manager [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1665.312064] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1665.312882] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72abba4-3a05-4cef-8055-6e495a460739 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.320447] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1665.322991] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-816995a3-1395-45c6-b816-96f3778ab2c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.330823] env[62816]: DEBUG oslo_vmware.api [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1665.330823] env[62816]: value = "task-1788690" [ 1665.330823] env[62816]: _type = "Task" [ 1665.330823] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.339579] env[62816]: DEBUG oslo_vmware.api [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788690, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.373228] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8f0afa-c0ae-4408-a594-1986fe2951b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.381252] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ee5fe1-b9b4-4e14-99d6-f3e332c00ef2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.411779] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5929fad-8cf8-4113-a2ae-6729de1ca463 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.420099] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795a3a1e-5ed8-496a-8710-2cb2af0f4c74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.434021] env[62816]: DEBUG nova.compute.provider_tree [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1665.610848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3ab5778c-cb28-4612-898f-b68c6e2872f3 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 40.483s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.771424] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.771759] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Instance network_info: |[{"id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "address": "fa:16:3e:84:b5:d3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5a67df-81", "ovs_interfaceid": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1665.772392] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:b5:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b5a67df-8153-4be4-8afe-6bd3ae2b807b', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1665.780036] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating folder: Project (12767255c02a4e16ad13383fdb725593). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1665.780316] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da0328fd-30de-4810-a038-077e2b5af2c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.791266] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created folder: Project (12767255c02a4e16ad13383fdb725593) in parent group-v370905. [ 1665.791453] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating folder: Instances. Parent ref: group-v371099. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1665.791687] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-220ac15b-814f-4b3d-8a5e-1453fce7c897 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.800272] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created folder: Instances in parent group-v371099. 
[ 1665.800498] env[62816]: DEBUG oslo.service.loopingcall [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1665.800681] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1665.800900] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3e9af085-46f2-45aa-aa29-93c09310bcf2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.821470] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1665.821470] env[62816]: value = "task-1788693" [ 1665.821470] env[62816]: _type = "Task" [ 1665.821470] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.830490] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788693, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.838895] env[62816]: DEBUG oslo_vmware.api [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788690, 'name': PowerOffVM_Task, 'duration_secs': 0.178323} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.839171] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1665.839342] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1665.839577] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bd1d8bf-5d78-42ac-ad18-479c073849cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.960718] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1665.961016] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} 
[ 1665.961273] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Deleting the datastore file [datastore1] 9c246982-b215-46c1-9cd3-63907a515086 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1665.961561] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce44e636-9de6-4c10-a648-45c59c4edacc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.967895] env[62816]: DEBUG oslo_vmware.api [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for the task: (returnval){ [ 1665.967895] env[62816]: value = "task-1788695" [ 1665.967895] env[62816]: _type = "Task" [ 1665.967895] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.968711] env[62816]: DEBUG nova.scheduler.client.report [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 98 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1665.968999] env[62816]: DEBUG nova.compute.provider_tree [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 98 to 99 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1665.969233] env[62816]: DEBUG nova.compute.provider_tree [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1665.983955] env[62816]: DEBUG oslo_vmware.api [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788695, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.051879] env[62816]: DEBUG nova.compute.manager [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Received event network-changed-3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1666.051879] env[62816]: DEBUG nova.compute.manager [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Refreshing instance network info cache due to event network-changed-3b5a67df-8153-4be4-8afe-6bd3ae2b807b. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1666.051879] env[62816]: DEBUG oslo_concurrency.lockutils [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] Acquiring lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.051879] env[62816]: DEBUG oslo_concurrency.lockutils [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] Acquired lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.052052] env[62816]: DEBUG nova.network.neutron [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Refreshing network info cache for port 3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1666.068728] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1666.095036] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1666.095036] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1666.095036] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1666.095232] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1666.095369] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1666.095519] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1666.095745] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1666.096320] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1666.096320] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1666.096320] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1666.096540] env[62816]: DEBUG nova.virt.hardware [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1666.097814] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d729b2-4d46-44e7-a713-cad5127cb139 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.106414] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75cda72-cb41-409e-8969-1a414cf36621 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.332643] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788693, 'name': CreateVM_Task, 'duration_secs': 0.375477} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.332847] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1666.334028] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.334229] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.334560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1666.334814] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba0ab980-6bc2-463d-9502-8415302113a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.339587] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1666.339587] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cf2169-fd17-98a2-fd7a-016e9ad793aa" [ 1666.339587] env[62816]: _type = "Task" [ 1666.339587] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.347601] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cf2169-fd17-98a2-fd7a-016e9ad793aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.476143] env[62816]: DEBUG nova.network.neutron [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Successfully updated port: 7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1666.477928] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.478923] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1666.485084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.412s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.485179] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.487283] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.281s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.487476] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.489130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.520s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1666.489327] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.490879] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.653s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1666.492444] env[62816]: INFO nova.compute.claims [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1666.509037] env[62816]: DEBUG oslo_vmware.api [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Task: {'id': task-1788695, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134176} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.509326] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1666.509526] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1666.509805] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1666.509867] env[62816]: INFO nova.compute.manager [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1666.510125] env[62816]: DEBUG oslo.service.loopingcall [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
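Right after the instance is destroyed on the hypervisor, the manager waits for `_deallocate_network_with_retries` to return via a looping call. As a rough, plain-Python illustration (not the actual oslo.service looping-call machinery), a deallocation wrapped in a bounded retry loop could look like this; `deallocate`, the attempt count and the delay are all hypothetical.

```python
import time

def deallocate_with_retries(deallocate, attempts=3, delay=2.0):
    """Call a network-deallocation function, retrying on failure.

    Simplified sketch of the retry behaviour implied by
    _deallocate_network_with_retries; the real code drives this
    through an oslo.service looping call rather than a bare loop.
    """
    for attempt in range(1, attempts + 1):
        try:
            return deallocate()
        except Exception:
            if attempt == attempts:
                raise            # give up after the final attempt
            time.sleep(delay)    # back off before retrying
```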
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1666.510317] env[62816]: DEBUG nova.compute.manager [-] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1666.510438] env[62816]: DEBUG nova.network.neutron [-] [instance: 9c246982-b215-46c1-9cd3-63907a515086] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1666.519799] env[62816]: INFO nova.scheduler.client.report [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted allocations for instance c4117422-edd4-49a0-882c-2d8ae39b344d [ 1666.524695] env[62816]: INFO nova.scheduler.client.report [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Deleted allocations for instance ecf6469a-c110-4e29-b931-6f9a3b0144dc [ 1666.530824] env[62816]: INFO nova.scheduler.client.report [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Deleted allocations for instance 75165526-2744-40b3-b311-45d13cc48cf1 [ 1666.850847] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cf2169-fd17-98a2-fd7a-016e9ad793aa, 'name': SearchDatastore_Task, 'duration_secs': 0.012665} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.851188] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.851428] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1666.851690] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.851862] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.852063] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1666.852332] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-319c9037-035f-49ea-b600-69c2a72182c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.862832] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1666.863034] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1666.863768] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eea974b6-759c-4f91-8019-0e9e8f47352d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.869119] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1666.869119] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e374d6-e8d7-d994-c42f-fb5aa1044c7c" [ 1666.869119] env[62816]: _type = "Task" [ 1666.869119] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.877132] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e374d6-e8d7-d994-c42f-fb5aa1044c7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.912826] env[62816]: DEBUG nova.network.neutron [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updated VIF entry in instance network info cache for port 3b5a67df-8153-4be4-8afe-6bd3ae2b807b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1666.913228] env[62816]: DEBUG nova.network.neutron [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updating instance_info_cache with network_info: [{"id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "address": "fa:16:3e:84:b5:d3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5a67df-81", "ovs_interfaceid": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.982344] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1666.982502] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1666.982668] env[62816]: DEBUG nova.network.neutron [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1666.998907] env[62816]: DEBUG nova.compute.utils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1667.003860] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Allocating IP information in the background. 
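The instance_info_cache entry above is a list of VIF dictionaries. A small reader-side sketch of pulling the fixed IPs and device names out of that structure (the literal below is abbreviated from the cache entry in the log; the helper name is illustrative):

```python
# Abbreviated from the instance_info_cache entry logged above.
network_info = [{
    "id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b",
    "address": "fa:16:3e:84:b5:d3",
    "devname": "tap3b5a67df-81",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed"}],
        }],
    },
}]

def fixed_ips(vifs):
    """Yield (device name, fixed IP) pairs from a network_info list."""
    for vif in vifs:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip["type"] == "fixed":
                    yield vif["devname"], ip["address"]

print(list(fixed_ips(network_info)))  # [('tap3b5a67df-81', '192.168.128.4')]
```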
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1667.003860] env[62816]: DEBUG nova.network.neutron [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1667.011606] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a97c1a-ea07-4378-864e-86a5f794b27f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.021669] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Suspending the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1667.022207] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c35315e7-8cfc-4ceb-bb6e-96134914e2b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.031087] env[62816]: DEBUG oslo_vmware.api [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1667.031087] env[62816]: value = "task-1788696" [ 1667.031087] env[62816]: _type = "Task" [ 1667.031087] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.031544] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b62f428e-aca8-4b21-9a30-e621c42336bb tempest-AttachInterfacesV270Test-1575108540 tempest-AttachInterfacesV270Test-1575108540-project-member] Lock "ecf6469a-c110-4e29-b931-6f9a3b0144dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.779s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.039996] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4f6bf3bf-4944-40be-819d-d2124fdc56df tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "c4117422-edd4-49a0-882c-2d8ae39b344d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.020s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.043387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fc3136b1-27a1-49cb-83d9-e9162859a376 tempest-InstanceActionsNegativeTestJSON-879723759 tempest-InstanceActionsNegativeTestJSON-879723759-project-member] Lock "75165526-2744-40b3-b311-45d13cc48cf1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.028s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.047872] env[62816]: DEBUG oslo_vmware.api [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788696, 'name': SuspendVM_Task} progress is 0%. 
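Many of the lockutils entries report how long each caller waited for and then held a named lock (e.g. "waited 20.412s", "held 24.779s" above). A minimal timing wrapper in the same spirit, purely illustrative and not the oslo.concurrency implementation, might look like this:

```python
import threading
import time
from contextlib import contextmanager

_locks = {}

@contextmanager
def timed_lock(name):
    """Acquire a named lock and report waited/held durations, log-style."""
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" :: held {time.monotonic() - acquired:.3f}s')
```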
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.068288] env[62816]: DEBUG nova.policy [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f798e4071ce4e6c956443db4c414a9b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9b396c0ce6aa41c386d0a1f57a155fd8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1667.380977] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e374d6-e8d7-d994-c42f-fb5aa1044c7c, 'name': SearchDatastore_Task, 'duration_secs': 0.013724} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.381860] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e313dc6-cdc4-4b26-9633-d54d7f7c0c60 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.387806] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1667.387806] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52215540-c36d-6b29-a6c0-6f1f2f1715f8" [ 1667.387806] env[62816]: _type = "Task" [ 1667.387806] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.398391] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215540-c36d-6b29-a6c0-6f1f2f1715f8, 'name': SearchDatastore_Task} progress is 0%. 
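The policy entry above shows network:attach_external_network being denied for a caller whose roles are only ['member', 'reader']. Nova evaluates this through oslo.policy; the fragment below is only a stripped-down illustration of the same role test, with the admin-only requirement hard-coded as an assumption rather than taken from the real policy file.

```python
def can_attach_external_network(creds, required_role="admin"):
    """Simplified stand-in for the oslo.policy rule evaluation (assumed admin-only rule)."""
    return required_role in creds.get("roles", [])

creds = {"roles": ["member", "reader"], "is_admin": False}
print(can_attach_external_network(creds))  # False, matching the failed check above
```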
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.418710] env[62816]: DEBUG oslo_concurrency.lockutils [req-e54a2d53-5e6e-4af0-8d11-dfecf4729f28 req-d92c733a-a5ac-4c20-a543-a9a928451941 service nova] Releasing lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.423969] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.423969] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.424111] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.424270] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.424464] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.428107] env[62816]: INFO nova.compute.manager [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Terminating instance [ 1667.430457] env[62816]: DEBUG nova.compute.manager [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1667.431538] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1667.436385] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a41fb99-ad8a-47a0-a138-69b0b48f1819 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.440787] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1667.441726] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b80210e9-4ae7-41a0-ae41-6731ffe76164 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.443493] env[62816]: DEBUG nova.network.neutron [-] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1667.449897] env[62816]: DEBUG oslo_vmware.api [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1667.449897] env[62816]: value = "task-1788697" [ 1667.449897] env[62816]: _type = "Task" [ 1667.449897] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.457999] env[62816]: DEBUG oslo_vmware.api [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788697, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.460840] env[62816]: DEBUG nova.network.neutron [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Successfully created port: 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1667.503896] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1667.544179] env[62816]: DEBUG oslo_vmware.api [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788696, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.575575] env[62816]: DEBUG nova.network.neutron [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1667.837702] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9143a5ec-4ec0-427b-baff-2752bd626081 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.844748] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e257c182-6f5c-4516-b57d-ad5030962f51 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.879381] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b21495-08d5-4ca6-8335-d8d6ef1cf9f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.888484] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2863402-59c8-472b-91a4-95cb9ab92152 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.910272] env[62816]: DEBUG nova.compute.provider_tree [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.917587] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52215540-c36d-6b29-a6c0-6f1f2f1715f8, 'name': SearchDatastore_Task, 'duration_secs': 0.016519} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.918431] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.919461] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/c66fa160-d4dd-429f-8751-f36cb2020ff1.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1667.919942] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e3e9d93-6ef5-4fe4-bd24-76c0d939498c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.929090] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1667.929090] env[62816]: value = "task-1788698" [ 1667.929090] env[62816]: _type = "Task" [ 1667.929090] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.939510] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.946593] env[62816]: INFO nova.compute.manager [-] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Took 1.44 seconds to deallocate network for instance. [ 1667.963719] env[62816]: DEBUG oslo_vmware.api [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788697, 'name': PowerOffVM_Task, 'duration_secs': 0.19741} completed successfully. 
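The datastore searches and the CopyVirtualDisk_Task above are part of the image-cache flow: reuse the cached base image if the search finds it, copy its VMDK into the instance directory, then extend the root disk to the flavor size (the "Extending root virtual disk to 1048576" entry further below is in KB and matches m1.nano's root_gb=1). A small sketch of that size calculation and decision; the function names are illustrative, not Nova's vmops code.

```python
def root_disk_kb(root_gb):
    """Flavor root_gb expressed in KB, the unit used by the extend task here."""
    return root_gb * 1024 * 1024

def prepare_root_disk(cache_has_image, fetch_to_cache, copy_from_cache, extend, root_gb):
    """Illustrative fetch-if-missing / copy / extend sequence (not vmops.py itself)."""
    if not cache_has_image():
        fetch_to_cache()            # populate devstack-image-cache_base
    copy_from_cache()               # CopyVirtualDisk_Task equivalent
    extend(root_disk_kb(root_gb))   # ExtendVirtualDisk_Task equivalent

print(root_disk_kb(1))  # 1048576, matching the extend entry below
```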
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.963971] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1667.964224] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1667.964531] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bde5fed-ad20-412d-b684-88ae2b06a636 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.991921] env[62816]: DEBUG nova.network.neutron [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Updating instance_info_cache with network_info: [{"id": "7a7060d4-14aa-43c8-9359-52512eee6df8", "address": "fa:16:3e:2b:df:f3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7060d4-14", "ovs_interfaceid": "7a7060d4-14aa-43c8-9359-52512eee6df8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1668.046029] env[62816]: DEBUG oslo_vmware.api [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788696, 'name': SuspendVM_Task, 'duration_secs': 0.651602} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.046650] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Suspended the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1668.047201] env[62816]: DEBUG nova.compute.manager [None req-28d4326a-3348-4915-ba7b-1f10bae49764 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1668.050940] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e793aef1-2628-411d-929e-486a498e4ad5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.083820] env[62816]: DEBUG nova.compute.manager [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Received event network-vif-plugged-7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1668.083989] env[62816]: DEBUG oslo_concurrency.lockutils [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] Acquiring lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.084321] env[62816]: DEBUG oslo_concurrency.lockutils [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.085013] env[62816]: DEBUG oslo_concurrency.lockutils [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.085272] env[62816]: DEBUG nova.compute.manager [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] No waiting events found dispatching network-vif-plugged-7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1668.085654] env[62816]: WARNING nova.compute.manager [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Received unexpected event network-vif-plugged-7a7060d4-14aa-43c8-9359-52512eee6df8 for instance with vm_state building and task_state spawning. 
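The "Received event network-vif-plugged-…" / "No waiting events found dispatching …" pair above reflects Nova's external-event mechanism: the compute manager registers an event it intends to wait for, and the incoming notification pops it; if nothing registered a waiter, the event is logged as unexpected. A much-reduced sketch of that register-then-dispatch pattern using threading.Event (not the actual InstanceEvents class):

```python
import threading

class InstanceEventSketch:
    """Register-then-dispatch pattern for external instance events."""

    def __init__(self):
        self._events = {}

    def prepare(self, name):
        """Called by the waiter before starting the operation that triggers the event."""
        ev = threading.Event()
        self._events[name] = ev
        return ev

    def dispatch(self, name):
        """Called when the external notification arrives."""
        ev = self._events.pop(name, None)
        if ev is None:
            print(f"Received unexpected event {name}")  # mirrors the WARNING above
        else:
            ev.set()

events = InstanceEventSketch()
events.dispatch("network-vif-plugged-7a7060d4")  # no waiter registered -> warning path
```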
[ 1668.086095] env[62816]: DEBUG nova.compute.manager [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Received event network-changed-7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1668.087069] env[62816]: DEBUG nova.compute.manager [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Refreshing instance network info cache due to event network-changed-7a7060d4-14aa-43c8-9359-52512eee6df8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1668.087069] env[62816]: DEBUG oslo_concurrency.lockutils [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] Acquiring lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1668.166030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.166440] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.207307] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1668.207515] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1668.207859] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleting the datastore file [datastore1] 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1668.208141] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d66c2a6b-6479-4baf-9c30-935ddff1344f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.217312] env[62816]: DEBUG oslo_vmware.api [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 
1668.217312] env[62816]: value = "task-1788700" [ 1668.217312] env[62816]: _type = "Task" [ 1668.217312] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.225999] env[62816]: DEBUG oslo_vmware.api [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788700, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.419408] env[62816]: DEBUG nova.scheduler.client.report [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1668.438774] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435767} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.438962] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/c66fa160-d4dd-429f-8751-f36cb2020ff1.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1668.439219] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1668.439476] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed588b83-e060-45a6-916f-10a2730f82e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.446544] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1668.446544] env[62816]: value = "task-1788701" [ 1668.446544] env[62816]: _type = "Task" [ 1668.446544] env[62816]: } to complete. 
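The inventory snapshot above gives totals, reserved amounts and allocation ratios per resource class. Placement treats the usable capacity of each class as (total - reserved) * allocation_ratio, so this node can schedule up to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick check of that arithmetic, using the values from the log:

```python
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    """Schedulable capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```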
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.454418] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788701, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.456262] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.494908] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1668.495304] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Instance network_info: |[{"id": "7a7060d4-14aa-43c8-9359-52512eee6df8", "address": "fa:16:3e:2b:df:f3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7060d4-14", "ovs_interfaceid": "7a7060d4-14aa-43c8-9359-52512eee6df8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1668.495613] env[62816]: DEBUG oslo_concurrency.lockutils [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] Acquired lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1668.495803] env[62816]: DEBUG nova.network.neutron [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Refreshing network info cache for port 7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1668.500018] 
env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:df:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a7060d4-14aa-43c8-9359-52512eee6df8', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1668.505142] env[62816]: DEBUG oslo.service.loopingcall [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.507503] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1668.507503] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b3f658d2-47c6-484d-ad0a-2c796385e13c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.523981] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1668.532287] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1668.532287] env[62816]: value = "task-1788702" [ 1668.532287] env[62816]: _type = "Task" [ 1668.532287] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.541440] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788702, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.555287] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1668.555503] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1668.555657] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1668.555842] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1668.556300] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1668.556300] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1668.556483] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1668.556483] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 
tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1668.556638] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1668.556848] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1668.557054] env[62816]: DEBUG nova.virt.hardware [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1668.558308] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e965ee59-3a08-4ed3-a324-e5204ded16b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.568868] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a73d0bb-e34b-49e9-9209-d479fa220a55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.669444] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1668.737199] env[62816]: DEBUG oslo_vmware.api [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788700, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.338926} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.737352] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1668.737579] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1668.737799] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1668.737989] env[62816]: INFO nova.compute.manager [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Took 1.31 seconds to destroy the instance on the hypervisor. [ 1668.738260] env[62816]: DEBUG oslo.service.loopingcall [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1668.738485] env[62816]: DEBUG nova.compute.manager [-] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1668.738615] env[62816]: DEBUG nova.network.neutron [-] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1668.925389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.928017] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.503s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.929193] env[62816]: INFO nova.compute.claims [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1668.957895] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 
tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788701, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.173216} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.958023] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1668.958899] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63a97aa-8a2b-42bb-bbcd-1c1d56b03935 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.993736] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/c66fa160-d4dd-429f-8751-f36cb2020ff1.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1668.993993] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34075840-e585-4034-8160-86d6a6485cb1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.018588] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1669.018588] env[62816]: value = "task-1788703" [ 1669.018588] env[62816]: _type = "Task" [ 1669.018588] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.027578] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788703, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.041665] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788702, 'name': CreateVM_Task, 'duration_secs': 0.410672} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.041933] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1669.043226] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.043226] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.043411] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1669.043740] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a242d717-5ac6-46f7-828c-aaf88de2791d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.049186] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1669.049186] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5228f441-0774-6a49-7875-a10b216c1aea" [ 1669.049186] env[62816]: _type = "Task" [ 1669.049186] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.056928] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5228f441-0774-6a49-7875-a10b216c1aea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.211848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.275768] env[62816]: DEBUG nova.network.neutron [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Updated VIF entry in instance network info cache for port 7a7060d4-14aa-43c8-9359-52512eee6df8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1669.276157] env[62816]: DEBUG nova.network.neutron [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Updating instance_info_cache with network_info: [{"id": "7a7060d4-14aa-43c8-9359-52512eee6df8", "address": "fa:16:3e:2b:df:f3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7060d4-14", "ovs_interfaceid": "7a7060d4-14aa-43c8-9359-52512eee6df8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.428742] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "6e0c93be-ef64-4fb6-9587-274989522616" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.429078] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "6e0c93be-ef64-4fb6-9587-274989522616" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.535246] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788703, 'name': ReconfigVM_Task, 'duration_secs': 0.276493} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.536283] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Reconfigured VM instance instance-00000043 to attach disk [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/c66fa160-d4dd-429f-8751-f36cb2020ff1.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1669.536283] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e9cbc77-13c2-4f65-9539-758ef3b664fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.542852] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1669.542852] env[62816]: value = "task-1788704" [ 1669.542852] env[62816]: _type = "Task" [ 1669.542852] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.553624] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788704, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.563640] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5228f441-0774-6a49-7875-a10b216c1aea, 'name': SearchDatastore_Task, 'duration_secs': 0.016424} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.564085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.564377] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1669.564626] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.564796] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.564973] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1669.565607] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20e3fac2-656b-4a06-8544-409c1eb73a35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.575555] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1669.575776] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1669.576594] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f1b575e-e119-4bf6-86c3-c7916c6ab68b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.583308] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1669.583308] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529ae138-14dd-045a-50ff-d6ca239fa283" [ 1669.583308] env[62816]: _type = "Task" [ 1669.583308] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.591177] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ae138-14dd-045a-50ff-d6ca239fa283, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.668736] env[62816]: DEBUG nova.network.neutron [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Successfully updated port: 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1669.673077] env[62816]: INFO nova.compute.manager [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Resuming [ 1669.673756] env[62816]: DEBUG nova.objects.instance [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lazy-loading 'flavor' on Instance uuid 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.717351] env[62816]: DEBUG nova.network.neutron [-] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.779516] env[62816]: DEBUG oslo_concurrency.lockutils [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] Releasing lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1669.779761] env[62816]: DEBUG nova.compute.manager [req-34865970-de0d-4dba-b544-cf12e414c1d7 req-55bcd961-b630-4fdb-8f10-0c810864dfae service nova] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Received event network-vif-deleted-7e97b5a8-3b7b-4ccf-accb-c84c06259813 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1669.932653] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock 
"6e0c93be-ef64-4fb6-9587-274989522616" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.933253] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1670.055860] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788704, 'name': Rename_Task, 'duration_secs': 0.145478} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.056192] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1670.056486] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b04a47b0-d438-4193-956a-7a42ba6345c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.064025] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1670.064025] env[62816]: value = "task-1788705" [ 1670.064025] env[62816]: _type = "Task" [ 1670.064025] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.073094] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788705, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.095938] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ae138-14dd-045a-50ff-d6ca239fa283, 'name': SearchDatastore_Task, 'duration_secs': 0.009351} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.096884] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ba2d332-4b7a-4001-bbb9-fb7f364aa545 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.103843] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1670.103843] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52743036-4123-8f41-4c03-3f4f7fd8f653" [ 1670.103843] env[62816]: _type = "Task" [ 1670.103843] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.113590] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52743036-4123-8f41-4c03-3f4f7fd8f653, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.121935] env[62816]: DEBUG nova.compute.manager [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Received event network-vif-deleted-376123cb-17d7-4137-a4aa-f396ee425d69 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.122323] env[62816]: DEBUG nova.compute.manager [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Received event network-vif-plugged-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.123500] env[62816]: DEBUG oslo_concurrency.lockutils [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] Acquiring lock "8ccce660-6c41-412d-99ac-65ca7915d728-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.123753] env[62816]: DEBUG oslo_concurrency.lockutils [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] Lock "8ccce660-6c41-412d-99ac-65ca7915d728-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.124363] env[62816]: DEBUG oslo_concurrency.lockutils [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] Lock "8ccce660-6c41-412d-99ac-65ca7915d728-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.127386] env[62816]: DEBUG nova.compute.manager [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] No waiting events found dispatching 
network-vif-plugged-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1670.127606] env[62816]: WARNING nova.compute.manager [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Received unexpected event network-vif-plugged-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 for instance with vm_state building and task_state spawning. [ 1670.128275] env[62816]: DEBUG nova.compute.manager [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Received event network-changed-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1670.128275] env[62816]: DEBUG nova.compute.manager [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Refreshing instance network info cache due to event network-changed-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1670.128275] env[62816]: DEBUG oslo_concurrency.lockutils [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] Acquiring lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.128400] env[62816]: DEBUG oslo_concurrency.lockutils [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] Acquired lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.128495] env[62816]: DEBUG nova.network.neutron [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Refreshing network info cache for port 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1670.168560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.222801] env[62816]: INFO nova.compute.manager [-] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Took 1.48 seconds to deallocate network for instance. 
[ 1670.251672] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1785c4c-445d-4361-814e-490a04331a20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.261301] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec6ca9e-c662-4dec-a847-03cf20b47a90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.296276] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4af1900-e84f-4c5f-a18c-593a01865a36 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.305382] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47084c0-af07-46a6-b22a-0804c6c5a0a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.321541] env[62816]: DEBUG nova.compute.provider_tree [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.441913] env[62816]: DEBUG nova.compute.utils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1670.443083] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1670.443269] env[62816]: DEBUG nova.network.neutron [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1670.493211] env[62816]: DEBUG nova.policy [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8f9379e0e7214e1f8ce84471c28af212', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f35e68446eb94d5399de61a2ee822a2a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1670.574840] env[62816]: DEBUG oslo_vmware.api [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788705, 'name': PowerOnVM_Task, 'duration_secs': 0.461631} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.574840] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1670.574840] env[62816]: INFO nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Took 6.84 seconds to spawn the instance on the hypervisor. [ 1670.574840] env[62816]: DEBUG nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1670.575674] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0b1956-9151-437e-8ce9-fd237d663ce7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.615687] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52743036-4123-8f41-4c03-3f4f7fd8f653, 'name': SearchDatastore_Task, 'duration_secs': 0.00965} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.616062] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.616368] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/543d69d2-0694-4d57-bbae-f8851ff0f0dc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1670.616693] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deb51867-e2d8-4b86-84fd-bdcab15ec247 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.624278] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1670.624278] env[62816]: value = "task-1788706" [ 1670.624278] env[62816]: _type = "Task" [ 1670.624278] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.635510] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788706, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.683442] env[62816]: DEBUG nova.network.neutron [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1670.686378] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1670.686956] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquired lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1670.687479] env[62816]: DEBUG nova.network.neutron [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1670.734038] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.818720] env[62816]: DEBUG nova.network.neutron [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1670.849711] env[62816]: ERROR nova.scheduler.client.report [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [req-ce40d993-188c-4163-9325-caf40be44702] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ce40d993-188c-4163-9325-caf40be44702"}]} [ 1670.876572] env[62816]: DEBUG nova.scheduler.client.report [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1670.896650] env[62816]: DEBUG nova.scheduler.client.report [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1670.897020] env[62816]: DEBUG nova.compute.provider_tree [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.914369] env[62816]: DEBUG nova.scheduler.client.report [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1670.937159] env[62816]: DEBUG nova.scheduler.client.report [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1670.946604] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1671.017022] env[62816]: DEBUG nova.network.neutron [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Successfully created port: 3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.098869] env[62816]: INFO nova.compute.manager [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Took 36.37 seconds to build instance. [ 1671.138571] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788706, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498834} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.138695] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/543d69d2-0694-4d57-bbae-f8851ff0f0dc.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1671.138844] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1671.139820] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c82f600f-1101-4067-be9f-66116df9e152 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.146669] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1671.146669] env[62816]: value = "task-1788707" [ 1671.146669] env[62816]: _type = "Task" [ 1671.146669] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.156556] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788707, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.309373] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a120839b-7175-4799-8a3d-c7dd2ff2b00e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.320417] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07be608-ae55-4202-9796-dd09d48f4648 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.325518] env[62816]: DEBUG oslo_concurrency.lockutils [req-ea004a97-0330-4435-866f-1f9ffdc1e3a6 req-4cec798c-ecce-42d6-9cac-3bd36f2ab09b service nova] Releasing lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1671.325849] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquired lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1671.326016] env[62816]: DEBUG nova.network.neutron [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1671.360870] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37689330-707e-47c0-8e63-c067ac1085de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.373869] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0414bdcb-4fcf-4862-b841-03f69b75da86 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.391603] env[62816]: DEBUG nova.compute.provider_tree [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.601544] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3dcc8a2c-5f87-47ff-bdda-68dc99123717 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.876s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.624720] env[62816]: DEBUG nova.network.neutron [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [{"id": "f2f2e184-1921-455c-b435-44548769245c", "address": "fa:16:3e:08:90:14", "network": {"id": "a8633ebc-d5cb-4119-88d9-f91efc4e1bae", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1949391212-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "005f772e517340a0acaac0d61b8262df", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "75ff81f9-72b2-4e58-a8d8-5699907f7459", "external-id": "nsx-vlan-transportzone-978", "segmentation_id": 978, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf2f2e184-19", "ovs_interfaceid": "f2f2e184-1921-455c-b435-44548769245c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1671.658570] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065375} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.658851] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1671.660645] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc25f14a-6c77-414c-9e13-5af913bf6aa6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.685175] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/543d69d2-0694-4d57-bbae-f8851ff0f0dc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1671.685475] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-50761ac8-d157-4bfd-ab86-594886b842ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.708761] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1671.708761] env[62816]: value = "task-1788708" [ 1671.708761] env[62816]: _type = "Task" [ 1671.708761] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.717244] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788708, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.903724] env[62816]: DEBUG nova.network.neutron [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1671.941217] env[62816]: DEBUG nova.scheduler.client.report [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1671.941514] env[62816]: DEBUG nova.compute.provider_tree [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 100 to 101 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1671.942156] env[62816]: DEBUG nova.compute.provider_tree [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.958693] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1671.992386] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1671.992643] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1671.992803] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1671.992989] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1671.993173] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1671.993324] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1671.993533] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1671.993693] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1671.995440] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 
tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1671.995649] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1671.995834] env[62816]: DEBUG nova.virt.hardware [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1671.996722] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e6a7ab-50e2-46ba-8b0b-6b7d6e8f305f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.006888] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb497f9-1257-4046-baf8-0799ce115fe8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.127801] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Releasing lock "refresh_cache-679cd9a3-2ed6-451f-b934-ba7738913959" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.130222] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae396b7d-0234-4583-82ee-802c62ada721 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.136979] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Resuming the VM {{(pid=62816) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1672.137247] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-529b6aec-fe14-43a9-8eec-4c665d258c49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.144532] env[62816]: DEBUG oslo_vmware.api [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1672.144532] env[62816]: value = "task-1788709" [ 1672.144532] env[62816]: _type = "Task" [ 1672.144532] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.152236] env[62816]: DEBUG oslo_vmware.api [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788709, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.219078] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788708, 'name': ReconfigVM_Task, 'duration_secs': 0.271357} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.219418] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/543d69d2-0694-4d57-bbae-f8851ff0f0dc.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1672.220073] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffa59ebc-7d17-4fdb-b19f-fb6e9fe5182d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.226535] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1672.226535] env[62816]: value = "task-1788710" [ 1672.226535] env[62816]: _type = "Task" [ 1672.226535] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.241424] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788710, 'name': Rename_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.290311] env[62816]: DEBUG nova.network.neutron [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updating instance_info_cache with network_info: [{"id": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "address": "fa:16:3e:70:bf:cc", "network": {"id": "603f3279-8e01-4266-9f62-9010696b0166", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-751649207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b396c0ce6aa41c386d0a1f57a155fd8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bef2124-4f", "ovs_interfaceid": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1672.456964] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.529s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.457522] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1672.460321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.199s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.460545] env[62816]: DEBUG nova.objects.instance [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lazy-loading 'resources' on Instance uuid 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1672.657729] env[62816]: DEBUG oslo_vmware.api [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788709, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.663875] env[62816]: DEBUG nova.compute.manager [req-fffcf52a-9a6a-4f31-bae5-4f31322eb0eb req-fb693d94-c162-43c3-82a5-2cd094bbbcc4 service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Received event network-vif-plugged-3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1672.664117] env[62816]: DEBUG oslo_concurrency.lockutils [req-fffcf52a-9a6a-4f31-bae5-4f31322eb0eb req-fb693d94-c162-43c3-82a5-2cd094bbbcc4 service nova] Acquiring lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1672.664367] env[62816]: DEBUG oslo_concurrency.lockutils [req-fffcf52a-9a6a-4f31-bae5-4f31322eb0eb req-fb693d94-c162-43c3-82a5-2cd094bbbcc4 service nova] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1672.664507] env[62816]: DEBUG oslo_concurrency.lockutils [req-fffcf52a-9a6a-4f31-bae5-4f31322eb0eb req-fb693d94-c162-43c3-82a5-2cd094bbbcc4 service nova] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.664659] env[62816]: DEBUG nova.compute.manager [req-fffcf52a-9a6a-4f31-bae5-4f31322eb0eb req-fb693d94-c162-43c3-82a5-2cd094bbbcc4 service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] No waiting events found dispatching network-vif-plugged-3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1672.664827] env[62816]: WARNING nova.compute.manager [req-fffcf52a-9a6a-4f31-bae5-4f31322eb0eb req-fb693d94-c162-43c3-82a5-2cd094bbbcc4 service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Received unexpected event network-vif-plugged-3dd38577-9c08-47e5-afe3-ca2e34af1424 for instance with vm_state building and task_state spawning. 
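(Aside, not part of the captured log: the records above trace the oslo_vmware task lifecycle for ReconfigVM_Task / PowerOnVM_Task — a "Waiting for the task" line from wait_for_task, periodic "progress is N%" polls from _poll_task, and a final "completed successfully" record carrying duration_secs. The following is a minimal, hypothetical Python sketch for pulling those completions out of a log file like this one; the file name and the regex are assumptions for illustration, not anything shipped with Nova or oslo.vmware.)

import re

# Matches the completion records emitted via oslo_vmware.api._poll_task, e.g.
#   Task: {'id': task-1788708, 'name': ReconfigVM_Task, 'duration_secs': 0.271357} completed successfully.
COMPLETED = re.compile(
    r"Task: \{'id': (?P<task_id>[\w\[\]\-.]+), 'name': (?P<name>\w+), "
    r"'duration_secs': (?P<secs>[\d.]+)\} completed successfully"
)

def completed_tasks(path="nova-compute.log"):  # hypothetical log file name
    """Yield (task_id, task_name, duration_secs) for every completed vCenter task."""
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            m = COMPLETED.search(line)
            if m:
                yield m.group("task_id"), m.group("name"), float(m.group("secs"))

if __name__ == "__main__":
    for task_id, name, secs in completed_tasks():
        print(f"{task_id:<18} {name:<24} {secs:.3f}s")

(Run against the records above, this would report, for example, task-1788708 ReconfigVM_Task 0.271s and task-1788710 Rename_Task 0.140s.)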
[ 1672.737703] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788710, 'name': Rename_Task, 'duration_secs': 0.13973} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.738052] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1672.738318] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0f5af38-2a99-4baf-8389-b4d7c521abf1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.745583] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1672.745583] env[62816]: value = "task-1788711" [ 1672.745583] env[62816]: _type = "Task" [ 1672.745583] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.754346] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788711, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.787422] env[62816]: DEBUG nova.network.neutron [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Successfully updated port: 3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1672.793655] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Releasing lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1672.793655] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Instance network_info: |[{"id": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "address": "fa:16:3e:70:bf:cc", "network": {"id": "603f3279-8e01-4266-9f62-9010696b0166", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-751649207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b396c0ce6aa41c386d0a1f57a155fd8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bef2124-4f", "ovs_interfaceid": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1672.794150] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:bf:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3f695b6-65bc-45cc-a61d-3c38a14e5c0c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9bef2124-4f7f-4111-aa0d-0c2d63ad71c6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1672.808162] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Creating folder: Project (9b396c0ce6aa41c386d0a1f57a155fd8). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1672.809800] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-968d0548-24df-410f-9cde-b70f5f922fcd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.822523] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Created folder: Project (9b396c0ce6aa41c386d0a1f57a155fd8) in parent group-v370905. [ 1672.822779] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Creating folder: Instances. Parent ref: group-v371103. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1672.823095] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64866931-4553-49e3-a7cc-e6e22488107e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.833660] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Created folder: Instances in parent group-v371103. [ 1672.833940] env[62816]: DEBUG oslo.service.loopingcall [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1672.834244] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1672.834487] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a9af99e-7ad1-433a-9484-69d6da961247 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.861316] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1672.861316] env[62816]: value = "task-1788714" [ 1672.861316] env[62816]: _type = "Task" [ 1672.861316] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.871026] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788714, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.966962] env[62816]: DEBUG nova.compute.utils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1672.969144] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1672.969814] env[62816]: DEBUG nova.network.neutron [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1673.027836] env[62816]: DEBUG nova.policy [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2f99144f3364fe2b298fc4b579d6be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e4a92683e3f457bb157966a92b48577', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1673.157271] env[62816]: DEBUG oslo_vmware.api [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788709, 'name': PowerOnVM_Task, 'duration_secs': 0.569631} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.160238] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Resumed the VM {{(pid=62816) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1673.160480] env[62816]: DEBUG nova.compute.manager [None req-0c09600a-2bf2-439b-b90e-acf57b8d8523 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1673.163033] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def3f2b4-38c9-4506-8603-ee7effb46fcc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.258343] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788711, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.287346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "refresh_cache-a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.287346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquired lock "refresh_cache-a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.287346] env[62816]: DEBUG nova.network.neutron [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1673.343624] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa290af-ab80-4d9c-a7f1-8c1ace93e528 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.353419] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced28a59-31ae-4362-a8c1-a2000edfa49c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.391654] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e13a786-2043-45ec-a0d6-3a6162f9b733 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.401424] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788714, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.406903] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cc4bd9-fc1c-4837-ba6b-5b807f8d5b1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.424264] env[62816]: DEBUG nova.compute.provider_tree [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.472885] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1673.524361] env[62816]: DEBUG nova.network.neutron [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Successfully created port: 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1673.757311] env[62816]: DEBUG oslo_vmware.api [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788711, 'name': PowerOnVM_Task, 'duration_secs': 0.576281} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.757637] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1673.757854] env[62816]: INFO nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Took 7.69 seconds to spawn the instance on the hypervisor. [ 1673.758054] env[62816]: DEBUG nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1673.759044] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddbdf3ab-cc86-41f5-9176-ca84f6ebaf28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.834589] env[62816]: DEBUG nova.network.neutron [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1673.902495] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788714, 'name': CreateVM_Task, 'duration_secs': 0.758794} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.902788] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1673.903661] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.903661] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.903984] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1673.904213] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87e3aa7e-a259-4052-aa6e-75add399f7e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.909740] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1673.909740] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5299efc9-c5c6-719e-887c-a406ab7fe02a" [ 1673.909740] env[62816]: _type = "Task" [ 1673.909740] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.920314] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5299efc9-c5c6-719e-887c-a406ab7fe02a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.927681] env[62816]: DEBUG nova.scheduler.client.report [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1674.242965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.243298] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.280499] env[62816]: INFO nova.compute.manager [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Took 39.11 seconds to build instance. 
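(Aside, not part of the captured log: the set_inventory_for_provider and "Inventory has not changed" records above carry the full inventory dict for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Placement treats the usable capacity of each resource class as (total - reserved) * allocation_ratio, so the figures in this log work out as shown below. This is an illustrative sketch of that arithmetic only, not Nova or Placement code.)

# Inventory exactly as reported in the log for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    """Usable capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"] for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}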
[ 1674.321081] env[62816]: DEBUG nova.network.neutron [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Updating instance_info_cache with network_info: [{"id": "3dd38577-9c08-47e5-afe3-ca2e34af1424", "address": "fa:16:3e:4f:b1:4d", "network": {"id": "9b46be99-a7c1-472d-a8c1-5c2d0ec606fa", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-299442947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f35e68446eb94d5399de61a2ee822a2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dd38577-9c", "ovs_interfaceid": "3dd38577-9c08-47e5-afe3-ca2e34af1424", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.424936] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5299efc9-c5c6-719e-887c-a406ab7fe02a, 'name': SearchDatastore_Task, 'duration_secs': 0.051205} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.424936] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.424936] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.424936] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.424936] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.425150] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.425301] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad603179-21ba-4093-93f7-ae414a2faeda {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.433034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.437064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.987s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.437453] env[62816]: DEBUG nova.objects.instance [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 
tempest-ServersAdminTestJSON-1690259284-project-member] Lazy-loading 'resources' on Instance uuid 1056fc6e-af1e-4d63-a9ce-9ade4dd73891 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.438736] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.438948] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.440189] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e17c6455-4d6a-471d-b45b-7698c542ac6f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.447218] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1674.447218] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52801dfb-1fde-7605-2120-8a7b3dd9af6c" [ 1674.447218] env[62816]: _type = "Task" [ 1674.447218] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.457296] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52801dfb-1fde-7605-2120-8a7b3dd9af6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.483862] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1674.506669] env[62816]: INFO nova.scheduler.client.report [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Deleted allocations for instance 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd [ 1674.522070] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1674.522319] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1674.522479] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1674.522660] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1674.522806] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1674.522954] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1674.523183] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1674.524036] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb 
tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1674.524036] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1674.524036] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1674.524036] env[62816]: DEBUG nova.virt.hardware [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1674.525301] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6931b91b-a2d2-467a-bd79-413ae3d70dc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.539275] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ea2e48-f288-4696-b5ae-657f8d1027bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.741619] env[62816]: DEBUG nova.compute.manager [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Received event network-changed-3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1674.741919] env[62816]: DEBUG nova.compute.manager [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Refreshing instance network info cache due to event network-changed-3dd38577-9c08-47e5-afe3-ca2e34af1424. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1674.742190] env[62816]: DEBUG oslo_concurrency.lockutils [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] Acquiring lock "refresh_cache-a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.749540] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1674.783625] env[62816]: DEBUG oslo_concurrency.lockutils [None req-328e4171-15aa-4f3e-bab3-bc70e058173d tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.624s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.823905] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Releasing lock "refresh_cache-a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.823905] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Instance network_info: |[{"id": "3dd38577-9c08-47e5-afe3-ca2e34af1424", "address": "fa:16:3e:4f:b1:4d", "network": {"id": "9b46be99-a7c1-472d-a8c1-5c2d0ec606fa", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-299442947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f35e68446eb94d5399de61a2ee822a2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dd38577-9c", "ovs_interfaceid": "3dd38577-9c08-47e5-afe3-ca2e34af1424", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1674.824303] env[62816]: DEBUG oslo_concurrency.lockutils [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] Acquired lock "refresh_cache-a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.824382] env[62816]: DEBUG nova.network.neutron [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Refreshing network info cache for port 3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1674.825623] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4f:b1:4d', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '4e02e98f-44ce-42b7-a3ac-4034fae5d127', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3dd38577-9c08-47e5-afe3-ca2e34af1424', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1674.840156] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Creating folder: Project (f35e68446eb94d5399de61a2ee822a2a). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1674.844958] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a3090dc-7796-4dad-96d2-a89915b4f6cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.856445] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Created folder: Project (f35e68446eb94d5399de61a2ee822a2a) in parent group-v370905. [ 1674.856776] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Creating folder: Instances. Parent ref: group-v371106. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1674.856939] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-08bde431-efea-4857-a3e8-499b1717253b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.868089] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Created folder: Instances in parent group-v371106. [ 1674.868089] env[62816]: DEBUG oslo.service.loopingcall [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1674.868089] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1674.868089] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-afbc6758-4723-46d7-be3b-83814a88bf34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.894433] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1674.894433] env[62816]: value = "task-1788717" [ 1674.894433] env[62816]: _type = "Task" [ 1674.894433] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.904866] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788717, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.959349] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52801dfb-1fde-7605-2120-8a7b3dd9af6c, 'name': SearchDatastore_Task, 'duration_secs': 0.012024} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.960734] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23c6a0b0-123d-4a5c-9be0-c4ac23590df3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.965905] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1674.965905] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5213c042-abe3-f0d3-6742-f359ba69c476" [ 1674.965905] env[62816]: _type = "Task" [ 1674.965905] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.976971] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5213c042-abe3-f0d3-6742-f359ba69c476, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.021517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-06f72199-eb9b-495b-a6a0-9b4e352c0823 tempest-MigrationsAdminTest-666052295 tempest-MigrationsAdminTest-666052295-project-member] Lock "3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.030s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.276974] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.299939] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67776b30-ea85-43f5-87d1-304267fe652d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.310480] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082570a0-cd7b-4cdb-bba0-cc739718600c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.349494] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e886da5c-4739-414a-b7c5-723360cb7f15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.357900] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca254d6-7ad5-44c0-9ece-03444a06d86c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.371792] env[62816]: DEBUG nova.compute.provider_tree [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.405178] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788717, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.429223] env[62816]: DEBUG nova.network.neutron [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Successfully updated port: 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1675.479499] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5213c042-abe3-f0d3-6742-f359ba69c476, 'name': SearchDatastore_Task, 'duration_secs': 0.015861} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.482057] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.482499] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 8ccce660-6c41-412d-99ac-65ca7915d728/8ccce660-6c41-412d-99ac-65ca7915d728.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1675.482931] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3241f653-240b-48a5-b457-cf56ce5cd6c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.489783] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1675.489783] env[62816]: value = "task-1788718" [ 1675.489783] env[62816]: _type = "Task" [ 1675.489783] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.498013] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788718, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.674030] env[62816]: INFO nova.compute.manager [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Rescuing [ 1675.674030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.674030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.674237] env[62816]: DEBUG nova.network.neutron [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1675.715481] env[62816]: DEBUG nova.network.neutron [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Updated VIF entry in instance network info cache for port 3dd38577-9c08-47e5-afe3-ca2e34af1424. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1675.716493] env[62816]: DEBUG nova.network.neutron [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Updating instance_info_cache with network_info: [{"id": "3dd38577-9c08-47e5-afe3-ca2e34af1424", "address": "fa:16:3e:4f:b1:4d", "network": {"id": "9b46be99-a7c1-472d-a8c1-5c2d0ec606fa", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-299442947-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f35e68446eb94d5399de61a2ee822a2a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4e02e98f-44ce-42b7-a3ac-4034fae5d127", "external-id": "nsx-vlan-transportzone-874", "segmentation_id": 874, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3dd38577-9c", "ovs_interfaceid": "3dd38577-9c08-47e5-afe3-ca2e34af1424", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.875814] env[62816]: DEBUG nova.scheduler.client.report [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1675.907619] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788717, 'name': CreateVM_Task, 'duration_secs': 0.605904} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.907619] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1675.908380] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.908636] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.909177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1675.909648] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbe34946-fcc1-4544-9302-0f8792a70192 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.916921] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1675.916921] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f38be4-dd8b-660a-66c0-d067439b6520" [ 1675.916921] env[62816]: _type = "Task" [ 1675.916921] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.927083] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f38be4-dd8b-660a-66c0-d067439b6520, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.936044] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.936214] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.936362] env[62816]: DEBUG nova.network.neutron [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1676.004918] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788718, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.224945] env[62816]: DEBUG oslo_concurrency.lockutils [req-f10aa0be-5ac5-450a-8e66-a22066700b99 req-3b52791f-f62e-43f2-86c6-e9c546da2b9a service nova] Releasing lock "refresh_cache-a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.383709] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.947s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.387105] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.931s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.388484] env[62816]: DEBUG nova.objects.instance [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lazy-loading 'resources' on Instance uuid 9c246982-b215-46c1-9cd3-63907a515086 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1676.426108] env[62816]: INFO nova.scheduler.client.report [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted allocations for instance 1056fc6e-af1e-4d63-a9ce-9ade4dd73891 [ 1676.434185] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 
tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f38be4-dd8b-660a-66c0-d067439b6520, 'name': SearchDatastore_Task, 'duration_secs': 0.064423} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.434626] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.434893] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1676.435187] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.438813] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.439112] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1676.444415] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-18eb3be9-a24b-4454-9bbd-4be421ecf51e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.452501] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1676.452726] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1676.453496] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cd130dc-f985-474c-aa21-95b4e8508bde {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.460122] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1676.460122] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52715b7f-f7e1-e092-7bed-caff33ffb260" [ 1676.460122] env[62816]: _type = "Task" [ 1676.460122] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.471061] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52715b7f-f7e1-e092-7bed-caff33ffb260, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.489812] env[62816]: DEBUG nova.network.neutron [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Updating instance_info_cache with network_info: [{"id": "7a7060d4-14aa-43c8-9359-52512eee6df8", "address": "fa:16:3e:2b:df:f3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a7060d4-14", "ovs_interfaceid": "7a7060d4-14aa-43c8-9359-52512eee6df8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.496324] env[62816]: DEBUG nova.network.neutron [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1676.504561] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788718, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.734341} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.504839] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 8ccce660-6c41-412d-99ac-65ca7915d728/8ccce660-6c41-412d-99ac-65ca7915d728.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1676.505085] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1676.505749] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cffb2ea0-180e-47ef-9b12-15f00d6014a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.513566] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1676.513566] env[62816]: value = "task-1788719" [ 1676.513566] env[62816]: _type = "Task" [ 1676.513566] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.525920] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788719, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.718806] env[62816]: DEBUG nova.network.neutron [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating instance_info_cache with network_info: [{"id": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "address": "fa:16:3e:55:1f:0b", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cd3caf0-0f", "ovs_interfaceid": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.831204] env[62816]: DEBUG nova.compute.manager [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Received event network-vif-plugged-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1676.831628] env[62816]: DEBUG oslo_concurrency.lockutils [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.831701] env[62816]: DEBUG oslo_concurrency.lockutils [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.831896] env[62816]: DEBUG oslo_concurrency.lockutils [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.832804] env[62816]: DEBUG nova.compute.manager [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] No waiting events found dispatching network-vif-plugged-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1676.832804] env[62816]: WARNING nova.compute.manager [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Received unexpected event network-vif-plugged-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 for instance with vm_state building and task_state spawning. [ 1676.832804] env[62816]: DEBUG nova.compute.manager [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Received event network-changed-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1676.832804] env[62816]: DEBUG nova.compute.manager [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Refreshing instance network info cache due to event network-changed-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1676.832804] env[62816]: DEBUG oslo_concurrency.lockutils [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] Acquiring lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.937780] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c948fb38-86a1-4e5f-a040-7a759e8d13b7 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "1056fc6e-af1e-4d63-a9ce-9ade4dd73891" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.407s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.977495] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52715b7f-f7e1-e092-7bed-caff33ffb260, 'name': SearchDatastore_Task, 'duration_secs': 0.025599} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.978326] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-159900c3-cd7b-47b6-82bd-618f888340e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.986404] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1676.986404] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ebb4e3-cf62-98ee-073a-9146671fd1bf" [ 1676.986404] env[62816]: _type = "Task" [ 1676.986404] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.991668] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-543d69d2-0694-4d57-bbae-f8851ff0f0dc" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.999886] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ebb4e3-cf62-98ee-073a-9146671fd1bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.025676] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.466687} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.028145] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1677.028952] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ca0abb-d4bf-4400-a53c-7037cf21fc37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.061208] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 8ccce660-6c41-412d-99ac-65ca7915d728/8ccce660-6c41-412d-99ac-65ca7915d728.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1677.068672] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a879306-64c5-485f-a8bb-666e22029b84 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.086708] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1677.087382] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5a16f125-1ec1-4666-b1d7-e13f0527a500 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.092900] env[62816]: DEBUG oslo_vmware.api 
[None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1677.092900] env[62816]: value = "task-1788720" [ 1677.092900] env[62816]: _type = "Task" [ 1677.092900] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.097679] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1677.097679] env[62816]: value = "task-1788721" [ 1677.097679] env[62816]: _type = "Task" [ 1677.097679] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.107568] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788720, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.112358] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788721, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.153940] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "4ab07a21-2685-42bc-af13-b95473993d6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.154193] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "4ab07a21-2685-42bc-af13-b95473993d6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.222502] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.222836] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Instance network_info: |[{"id": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "address": "fa:16:3e:55:1f:0b", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", 
"dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cd3caf0-0f", "ovs_interfaceid": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1677.223747] env[62816]: DEBUG oslo_concurrency.lockutils [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] Acquired lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.224027] env[62816]: DEBUG nova.network.neutron [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Refreshing network info cache for port 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1677.225321] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:1f:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e238ac23-819b-452f-9015-52922e45efd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1677.233747] env[62816]: DEBUG oslo.service.loopingcall [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1677.237224] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1677.237778] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4365db12-b65c-4e2b-b675-052bad0997c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.258112] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a97df70-e2b8-4800-a218-13164c4b2972 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.267978] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f41ee9-a01c-480b-8162-b7c8a387aa2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.271338] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1677.271338] env[62816]: value = "task-1788722" [ 1677.271338] env[62816]: _type = "Task" [ 1677.271338] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.302705] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27a41f5-4b1f-4cbb-803f-b799d8caa39e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.308418] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788722, 'name': CreateVM_Task} progress is 15%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.313330] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e17a797-0533-4173-8fa4-6b9867534040 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.329420] env[62816]: DEBUG nova.compute.provider_tree [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.504054] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ebb4e3-cf62-98ee-073a-9146671fd1bf, 'name': SearchDatastore_Task, 'duration_secs': 0.040744} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.504054] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.504054] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a5f50ca4-4648-4f33-a6d3-18cfc4fd3441/a5f50ca4-4648-4f33-a6d3-18cfc4fd3441.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1677.505462] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b4680293-77b9-43a8-990a-ecf32d69b40f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.515023] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1677.515023] env[62816]: value = "task-1788723" [ 1677.515023] env[62816]: _type = "Task" [ 1677.515023] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.525135] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.608658] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788720, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.613642] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788721, 'name': PowerOffVM_Task, 'duration_secs': 0.196952} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.614026] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1677.615639] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6d8585d-2d70-4811-8b80-97331065ad23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.639768] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86945cef-2e91-4bf9-9548-dae8a32ce84b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.663426] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1677.673561] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1677.673834] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e78b5e1-ebd7-49ee-96a8-2feb0346a58f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.680505] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1677.680505] env[62816]: value = "task-1788724" [ 1677.680505] env[62816]: _type = "Task" [ 1677.680505] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.688575] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788724, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.780439] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788722, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.833049] env[62816]: DEBUG nova.scheduler.client.report [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1677.843944] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "0a1a8539-940a-4a17-9826-82736be41892" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.844220] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0a1a8539-940a-4a17-9826-82736be41892" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.844430] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "0a1a8539-940a-4a17-9826-82736be41892-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.844609] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0a1a8539-940a-4a17-9826-82736be41892-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.844773] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0a1a8539-940a-4a17-9826-82736be41892-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.847085] env[62816]: INFO nova.compute.manager [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Terminating instance [ 1677.849032] env[62816]: DEBUG nova.compute.manager [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 
tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1677.849185] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1677.849998] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bf6c14-0218-4a1a-b8f5-f29c57065914 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.858899] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1677.859135] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1881969b-24a4-47b8-858f-c513a0a8f38b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.865215] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1677.865215] env[62816]: value = "task-1788725" [ 1677.865215] env[62816]: _type = "Task" [ 1677.865215] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.873153] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788725, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.995165] env[62816]: DEBUG nova.network.neutron [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updated VIF entry in instance network info cache for port 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1677.995569] env[62816]: DEBUG nova.network.neutron [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating instance_info_cache with network_info: [{"id": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "address": "fa:16:3e:55:1f:0b", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cd3caf0-0f", "ovs_interfaceid": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.025547] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.106882] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788720, 'name': ReconfigVM_Task, 'duration_secs': 0.538279} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.107346] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 8ccce660-6c41-412d-99ac-65ca7915d728/8ccce660-6c41-412d-99ac-65ca7915d728.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1678.108465] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3997086c-ffce-42d3-90cc-2336e9b78093 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.116308] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1678.116308] env[62816]: value = "task-1788726" [ 1678.116308] env[62816]: _type = "Task" [ 1678.116308] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.128760] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788726, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.197305] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.197305] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1678.197531] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1678.197848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.198041] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.198503] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1678.198602] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c9eab8a5-01d9-4077-b99e-66b39ca926a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.213672] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1678.215048] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1678.217117] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09180f6c-09a4-4291-91fe-5722c44d2a61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.224415] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1678.224415] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522c77a7-5026-063b-a8af-67064feffb3f" [ 1678.224415] env[62816]: _type = "Task" [ 1678.224415] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.233976] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522c77a7-5026-063b-a8af-67064feffb3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.286612] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788722, 'name': CreateVM_Task, 'duration_secs': 0.785073} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.287009] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1678.288064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.288298] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1678.288784] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1678.289280] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a766ff42-151d-46c6-942d-1ffe6bde345c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.296392] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1678.296392] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525cd57a-083c-36a7-d416-8b987dcaf9a5" [ 1678.296392] env[62816]: _type = "Task" [ 1678.296392] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.309611] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525cd57a-083c-36a7-d416-8b987dcaf9a5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.338913] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.952s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.342414] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.130s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.344036] env[62816]: INFO nova.compute.claims [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1678.359574] env[62816]: INFO nova.scheduler.client.report [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Deleted allocations for instance 9c246982-b215-46c1-9cd3-63907a515086 [ 1678.379026] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788725, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.498199] env[62816]: DEBUG oslo_concurrency.lockutils [req-15850c71-54da-4296-91ec-6573d67b1c5b req-c41bf5dc-7826-4119-bd05-002639d5eacb service nova] Releasing lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.526163] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788723, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.949184} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.526514] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a5f50ca4-4648-4f33-a6d3-18cfc4fd3441/a5f50ca4-4648-4f33-a6d3-18cfc4fd3441.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1678.526728] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1678.527013] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-785f6520-7985-4070-a863-aaffc4520763 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.533257] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1678.533257] env[62816]: value = "task-1788727" [ 1678.533257] env[62816]: _type = "Task" [ 1678.533257] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.542964] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.627810] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788726, 'name': Rename_Task, 'duration_secs': 0.26968} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.628118] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1678.628374] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-69fcdc79-22b0-4e23-88ca-64c9a8c4f5e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.634494] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1678.634494] env[62816]: value = "task-1788728" [ 1678.634494] env[62816]: _type = "Task" [ 1678.634494] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.641941] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.734384] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522c77a7-5026-063b-a8af-67064feffb3f, 'name': SearchDatastore_Task, 'duration_secs': 0.061865} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.735322] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-863c2d6f-3edf-4600-b5d0-2e27e81a09e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.740771] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1678.740771] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52dd705e-6dd7-f458-a39e-95963188f4db" [ 1678.740771] env[62816]: _type = "Task" [ 1678.740771] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.748676] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd705e-6dd7-f458-a39e-95963188f4db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.806213] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525cd57a-083c-36a7-d416-8b987dcaf9a5, 'name': SearchDatastore_Task, 'duration_secs': 0.053667} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.808064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1678.808292] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1678.808511] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1678.867570] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7c1b1ad6-3898-4b83-8917-25856bad2b6e tempest-ImagesOneServerTestJSON-1722546866 tempest-ImagesOneServerTestJSON-1722546866-project-member] Lock "9c246982-b215-46c1-9cd3-63907a515086" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.560s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.878273] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788725, 'name': PowerOffVM_Task, 'duration_secs': 0.547877} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.878273] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1678.878448] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1678.878817] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdbecbcc-aa99-4c87-9ccc-a7e69fb8f499 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.975788] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1678.975788] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1678.975788] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleting the datastore file [datastore1] 0a1a8539-940a-4a17-9826-82736be41892 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1678.975788] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5bad8394-61e7-4bf8-b7f7-701a80b6e93c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.980629] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1678.980629] env[62816]: value = "task-1788730" [ 1678.980629] env[62816]: _type = "Task" [ 1678.980629] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.993258] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788730, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.045659] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094668} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.045957] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1679.046790] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ed4ed1-1ea6-4fd7-977b-b3f49cc7a5c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.071189] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] a5f50ca4-4648-4f33-a6d3-18cfc4fd3441/a5f50ca4-4648-4f33-a6d3-18cfc4fd3441.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1679.071547] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16b569a9-2713-4b9c-a981-d4e57f1b1441 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.091953] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1679.091953] env[62816]: value = "task-1788731" [ 1679.091953] env[62816]: _type = "Task" [ 1679.091953] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.100469] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788731, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.145998] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788728, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.251456] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd705e-6dd7-f458-a39e-95963188f4db, 'name': SearchDatastore_Task, 'duration_secs': 0.00947} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.251755] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.252028] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. {{(pid=62816) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1679.252344] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.252589] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1679.253378] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-01e16ee1-1105-4e26-8f05-22276a05ce1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.255166] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ad53a63c-01e4-4f4a-a700-67c014545df3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.262381] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1679.262381] env[62816]: value = "task-1788732" [ 1679.262381] env[62816]: _type = "Task" [ 1679.262381] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.267242] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1679.267978] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1679.269373] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-977595e7-d6f3-47ae-a4ec-12ac3faf9f5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.276971] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788732, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.280437] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1679.280437] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523a26bd-2ed0-90b8-19e0-3349df378c7f" [ 1679.280437] env[62816]: _type = "Task" [ 1679.280437] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.292239] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523a26bd-2ed0-90b8-19e0-3349df378c7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.493530] env[62816]: DEBUG oslo_vmware.api [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216655} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.494204] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1679.494676] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1679.495028] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1679.495356] env[62816]: INFO nova.compute.manager [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Took 1.65 seconds to destroy the instance on the hypervisor. 
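The sequences above (Invoking VirtualMachine.PowerOffVM_Task / FileManager.DeleteDatastoreFile_Task, "Waiting for the task: ...", then repeated "_poll_task ... progress is N%" lines) all follow the same submit-then-poll pattern: the vSphere "*_Task" call returns a Task managed-object reference immediately, and oslo.vmware polls it until it reaches a terminal state. A minimal sketch of that pattern, assuming an existing oslo_vmware.api.VMwareAPISession named `session` and a VM moref `vm_ref` (the helper name is illustrative, not Nova's actual code):

```python
def power_off_and_wait(session, vm_ref):
    """Submit PowerOffVM_Task and block until vCenter reports completion."""
    # invoke_api() returns a Task moref right away; the work is asynchronous
    # on the vCenter side.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task info (the "_poll_task ... progress is N%"
    # DEBUG lines in this log) until it reaches 'success'; an 'error' state
    # raises an oslo_vmware exception instead of returning.
    return session.wait_for_task(task)
```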
[ 1679.495810] env[62816]: DEBUG oslo.service.loopingcall [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.498018] env[62816]: DEBUG nova.compute.manager [-] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1679.498018] env[62816]: DEBUG nova.network.neutron [-] [instance: 0a1a8539-940a-4a17-9826-82736be41892] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1679.602636] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788731, 'name': ReconfigVM_Task, 'duration_secs': 0.290584} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.606049] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Reconfigured VM instance instance-00000046 to attach disk [datastore1] a5f50ca4-4648-4f33-a6d3-18cfc4fd3441/a5f50ca4-4648-4f33-a6d3-18cfc4fd3441.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1679.609697] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5eef45f2-b356-444d-a1e8-f59057219846 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.616226] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1679.616226] env[62816]: value = "task-1788733" [ 1679.616226] env[62816]: _type = "Task" [ 1679.616226] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.629817] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788733, 'name': Rename_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.634879] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b52b9cd-c455-47df-a356-c780d31eeaed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.644487] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8a382e-4810-421e-8518-eed9ea06b3b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.652707] env[62816]: DEBUG oslo_vmware.api [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788728, 'name': PowerOnVM_Task, 'duration_secs': 0.585313} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.653539] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1679.653890] env[62816]: INFO nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Took 11.13 seconds to spawn the instance on the hypervisor. 
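The interleaved 'Acquiring lock "..." by "..."' / 'Lock "..." acquired ... waited N s' / 'Lock "..." "released" ... held N s' entries throughout this run are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work on shared state such as "compute_resources", per-instance UUIDs, and the image-cache VMDK paths. A minimal sketch of the two forms that produce those lines, with illustrative lock names and placeholder bodies (not Nova's actual functions):

```python
from oslo_concurrency import lockutils


# Decorator form: every call waits for the named lock, and lockutils logs the
# "acquired ... waited" / "released ... held" timings seen above.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    pass  # placeholder: resource-tracker bookkeeping would go here


# Context-manager form, as used for the per-instance and image-cache locks.
def terminate(instance_uuid):
    with lockutils.lock(instance_uuid):
        pass  # placeholder: terminate_instance work would go here
```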
[ 1679.654209] env[62816]: DEBUG nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1679.655301] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc2a297-5d0e-4277-9cb7-d0e0f51b12fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.685982] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b167c685-9507-429a-84d7-a6b6b244d766 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.698690] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae21b00-3a93-4ca0-9941-e9a95fcf69db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.714661] env[62816]: DEBUG nova.compute.provider_tree [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.773409] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788732, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.794923] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523a26bd-2ed0-90b8-19e0-3349df378c7f, 'name': SearchDatastore_Task, 'duration_secs': 0.012715} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.796201] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d18d075-6826-47fe-9662-81ed3e45f3ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.803883] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1679.803883] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528a289c-2fa7-4a7a-e2a7-e44b1b004107" [ 1679.803883] env[62816]: _type = "Task" [ 1679.803883] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.817902] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528a289c-2fa7-4a7a-e2a7-e44b1b004107, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.917030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.917374] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.917598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.917778] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.917968] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.921324] env[62816]: INFO nova.compute.manager [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Terminating instance [ 1679.923194] env[62816]: DEBUG nova.compute.manager [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1679.925191] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1679.925191] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c429e395-ffec-4366-a32a-1cb9715c9ab8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.935163] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1679.935163] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5015ad64-565a-407d-947d-7cd235886d30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.942985] env[62816]: DEBUG oslo_vmware.api [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1679.942985] env[62816]: value = "task-1788734" [ 1679.942985] env[62816]: _type = "Task" [ 1679.942985] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.954650] env[62816]: DEBUG oslo_vmware.api [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788734, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.030086] env[62816]: DEBUG nova.compute.manager [req-6187a833-352c-4e5b-83f2-b2e1be409895 req-fc5500d7-fb5f-48a3-8068-d26acbfe8a6f service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Received event network-vif-deleted-c6e6822e-53bd-4c81-b715-3c3d6bef6f45 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1680.030086] env[62816]: INFO nova.compute.manager [req-6187a833-352c-4e5b-83f2-b2e1be409895 req-fc5500d7-fb5f-48a3-8068-d26acbfe8a6f service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Neutron deleted interface c6e6822e-53bd-4c81-b715-3c3d6bef6f45; detaching it from the instance and deleting it from the info cache [ 1680.030086] env[62816]: DEBUG nova.network.neutron [req-6187a833-352c-4e5b-83f2-b2e1be409895 req-fc5500d7-fb5f-48a3-8068-d26acbfe8a6f service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.128426] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788733, 'name': Rename_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.208034] env[62816]: INFO nova.compute.manager [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Took 41.66 seconds to build instance. [ 1680.218260] env[62816]: DEBUG nova.scheduler.client.report [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1680.273862] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788732, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.775738} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.274089] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. [ 1680.274947] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3a76d5-1382-4f73-920d-4d450d73884d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.303352] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1680.305040] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e4eaa15-cb31-4357-b9db-d49aafe916d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.331643] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528a289c-2fa7-4a7a-e2a7-e44b1b004107, 'name': SearchDatastore_Task, 'duration_secs': 0.056542} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.333079] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.333350] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d03ed540-5c20-4bcb-ac7e-eec8c09e4451/d03ed540-5c20-4bcb-ac7e-eec8c09e4451.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1680.333680] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1680.333680] env[62816]: value = "task-1788735" [ 1680.333680] env[62816]: _type = "Task" [ 1680.333680] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.333874] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ed7cd11e-c03e-46d9-8513-20da16e6ffc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.346130] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788735, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.347635] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1680.347635] env[62816]: value = "task-1788736" [ 1680.347635] env[62816]: _type = "Task" [ 1680.347635] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.356066] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788736, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.455095] env[62816]: DEBUG oslo_vmware.api [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788734, 'name': PowerOffVM_Task, 'duration_secs': 0.239476} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.455405] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1680.455593] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1680.455864] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57a302b7-167b-4f4b-a2a2-8d98391ea888 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.476293] env[62816]: DEBUG nova.network.neutron [-] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.540328] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80d71730-3eaa-4c74-99fd-641d234ebb87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.550021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5665d5-f8d0-4271-8736-7a3bb9619992 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.576135] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1680.576429] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1680.576622] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleting the datastore file [datastore1] 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1680.591763] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d16a99cf-c642-468f-ba77-d33d44c5c822 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.593573] env[62816]: DEBUG nova.compute.manager [req-6187a833-352c-4e5b-83f2-b2e1be409895 req-fc5500d7-fb5f-48a3-8068-d26acbfe8a6f service nova] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Detach interface failed, port_id=c6e6822e-53bd-4c81-b715-3c3d6bef6f45, reason: 
Instance 0a1a8539-940a-4a17-9826-82736be41892 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1680.600768] env[62816]: DEBUG oslo_vmware.api [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for the task: (returnval){ [ 1680.600768] env[62816]: value = "task-1788738" [ 1680.600768] env[62816]: _type = "Task" [ 1680.600768] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.613853] env[62816]: DEBUG oslo_vmware.api [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788738, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.628052] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788733, 'name': Rename_Task, 'duration_secs': 0.854267} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.628451] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1680.628743] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a00fa37-75de-4c0a-bf16-3355bcbab5d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.636138] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1680.636138] env[62816]: value = "task-1788739" [ 1680.636138] env[62816]: _type = "Task" [ 1680.636138] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.649427] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788739, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.710380] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7e1c0950-f6cc-4a34-b2e3-cd01f4f6b6f0 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "8ccce660-6c41-412d-99ac-65ca7915d728" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.183s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.724662] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.725298] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1680.728842] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.994s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.728842] env[62816]: DEBUG nova.objects.instance [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lazy-loading 'resources' on Instance uuid 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1680.851503] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788735, 'name': ReconfigVM_Task, 'duration_secs': 0.341844} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.855557] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1680.856942] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c24f64-49c1-428d-a0c0-d9b495182e30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.866420] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788736, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467927} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.884450] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d03ed540-5c20-4bcb-ac7e-eec8c09e4451/d03ed540-5c20-4bcb-ac7e-eec8c09e4451.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1680.884977] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1680.892101] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-597e39d7-6f88-4814-8708-1d0f66ccdc7b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.893788] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adad286c-9bfd-415e-abda-a3fa5f58fed2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.914159] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1680.914159] env[62816]: value = "task-1788740" [ 1680.914159] env[62816]: _type = "Task" [ 1680.914159] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.914612] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1680.914612] env[62816]: value = "task-1788741" [ 1680.914612] env[62816]: _type = "Task" [ 1680.914612] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.932283] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788740, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.935925] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.979124] env[62816]: INFO nova.compute.manager [-] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Took 1.48 seconds to deallocate network for instance. [ 1681.114964] env[62816]: DEBUG oslo_vmware.api [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Task: {'id': task-1788738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317879} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.115291] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1681.115482] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1681.115722] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1681.115935] env[62816]: INFO nova.compute.manager [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1681.117142] env[62816]: DEBUG oslo.service.loopingcall [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1681.117142] env[62816]: DEBUG nova.compute.manager [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1681.117142] env[62816]: DEBUG nova.network.neutron [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1681.151020] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788739, 'name': PowerOnVM_Task} progress is 92%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.233264] env[62816]: DEBUG nova.compute.utils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1681.241551] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1681.241551] env[62816]: DEBUG nova.network.neutron [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1681.338071] env[62816]: DEBUG nova.policy [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13a3267ab64e4640bf00a0e5dbaaf044', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2d830983a3c14168b8f0b67478f27589', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1681.430830] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788740, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067716} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.435071] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1681.435071] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788741, 'name': ReconfigVM_Task, 'duration_secs': 0.182289} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.438081] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d345d91-983f-4fa9-ae6c-7cee91b4cd9e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.441441] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1681.441441] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e4428ca-ff45-4d5d-8113-8f34dae90877 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.469257] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] d03ed540-5c20-4bcb-ac7e-eec8c09e4451/d03ed540-5c20-4bcb-ac7e-eec8c09e4451.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1681.469934] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1681.469934] env[62816]: value = "task-1788742" [ 1681.469934] env[62816]: _type = "Task" [ 1681.469934] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.473962] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f97ad5e8-19c1-4ffd-b295-126c6361f03b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.496386] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.496612] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788742, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.498101] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1681.498101] env[62816]: value = "task-1788743" [ 1681.498101] env[62816]: _type = "Task" [ 1681.498101] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.510601] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788743, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.593757] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2167ce5-3c95-4146-a00b-aa309911c782 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.601262] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8d0694-3cc3-4adf-96d2-52df41977d8f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.632951] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad23360-9682-4f4b-81da-dc7d9e5c8e9d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.644805] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736c9098-fbd5-4d7b-94f2-f347dc6ec0d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.652419] env[62816]: DEBUG oslo_vmware.api [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788739, 'name': PowerOnVM_Task, 'duration_secs': 0.551507} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.653141] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1681.653431] env[62816]: INFO nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1681.653695] env[62816]: DEBUG nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1681.654520] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8a746b-19f6-4bda-96c9-1ef54071534a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.666794] env[62816]: DEBUG nova.compute.provider_tree [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1681.740176] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1681.916122] env[62816]: DEBUG nova.network.neutron [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Successfully created port: 479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1682.000437] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788742, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.010566] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788743, 'name': ReconfigVM_Task, 'duration_secs': 0.502071} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.010869] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfigured VM instance instance-00000047 to attach disk [datastore1] d03ed540-5c20-4bcb-ac7e-eec8c09e4451/d03ed540-5c20-4bcb-ac7e-eec8c09e4451.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1682.011549] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17088542-7956-41a2-9170-45ce8a01b3d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.019286] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1682.019286] env[62816]: value = "task-1788744" [ 1682.019286] env[62816]: _type = "Task" [ 1682.019286] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.030825] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788744, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.174180] env[62816]: DEBUG nova.scheduler.client.report [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1682.191912] env[62816]: INFO nova.compute.manager [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Took 28.37 seconds to build instance. 
[ 1682.259998] env[62816]: DEBUG nova.compute.manager [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Received event network-vif-deleted-f2f2e184-1921-455c-b435-44548769245c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1682.260737] env[62816]: INFO nova.compute.manager [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Neutron deleted interface f2f2e184-1921-455c-b435-44548769245c; detaching it from the instance and deleting it from the info cache [ 1682.260974] env[62816]: DEBUG nova.network.neutron [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.262702] env[62816]: DEBUG nova.network.neutron [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.502739] env[62816]: DEBUG oslo_vmware.api [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788742, 'name': PowerOnVM_Task, 'duration_secs': 0.809669} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.502998] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1682.506305] env[62816]: DEBUG nova.compute.manager [None req-7b3170b9-6f6d-4d67-94ee-eafc10fd2c7e tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1682.507114] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66150171-f18b-4776-9ca9-b81a0844db49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.530962] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788744, 'name': Rename_Task, 'duration_secs': 0.291831} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.532893] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1682.533194] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d78f2e1b-0a41-4d53-93c1-50c1693809d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.541028] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1682.541028] env[62816]: value = "task-1788745" [ 1682.541028] env[62816]: _type = "Task" [ 1682.541028] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.549640] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788745, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.686243] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.958s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.689898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.412s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.690037] env[62816]: INFO nova.compute.claims [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1682.696384] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8975d4b-9d17-4373-b8cd-953048391085 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.890s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.714050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.714929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.715174] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.715367] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.715531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.717857] env[62816]: INFO nova.compute.manager [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Terminating instance [ 1682.719919] env[62816]: DEBUG nova.compute.manager [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1682.720122] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1682.720971] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ebc5e2-b57d-4e95-a923-0a52b0b5001e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.724395] env[62816]: INFO nova.scheduler.client.report [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted allocations for instance 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac [ 1682.730459] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.730874] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-326f94e2-0665-4ad4-9d4f-e6b74dbfc93a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.742698] env[62816]: DEBUG oslo_vmware.api [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1682.742698] env[62816]: value = "task-1788746" [ 1682.742698] env[62816]: _type = "Task" [ 1682.742698] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.749638] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1682.756902] env[62816]: DEBUG oslo_vmware.api [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.765479] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b451980-b8b9-41d5-81e7-155e2591194d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.767850] env[62816]: INFO nova.compute.manager [-] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Took 1.65 seconds to deallocate network for instance. 
[ 1682.776466] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53488ed6-cded-4d75-8e2b-983624c8a996 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.800376] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1682.800376] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1682.800376] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1682.800376] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1682.800376] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1682.800710] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1682.800710] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1682.800785] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1682.800944] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1682.801167] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1682.801353] env[62816]: DEBUG nova.virt.hardware [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1682.802485] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edede65-9b02-46af-8d43-c5f6ab1d821b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.818205] env[62816]: DEBUG nova.compute.manager [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Detach interface failed, port_id=f2f2e184-1921-455c-b435-44548769245c, reason: Instance 679cd9a3-2ed6-451f-b934-ba7738913959 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1682.818475] env[62816]: DEBUG nova.compute.manager [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Received event network-changed-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1682.818659] env[62816]: DEBUG nova.compute.manager [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Refreshing instance network info cache due to event network-changed-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1682.818877] env[62816]: DEBUG oslo_concurrency.lockutils [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] Acquiring lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.819099] env[62816]: DEBUG oslo_concurrency.lockutils [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] Acquired lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.819384] env[62816]: DEBUG nova.network.neutron [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Refreshing network info cache for port 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1682.827969] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81a8279-6e73-4446-a0d4-21d9628c7f8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.052059] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788745, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.235457] env[62816]: DEBUG oslo_concurrency.lockutils [None req-63cc21b5-216c-4696-a721-06f8ca9aceb8 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.811s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1683.253567] env[62816]: DEBUG oslo_vmware.api [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788746, 'name': PowerOffVM_Task, 'duration_secs': 0.263761} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.253915] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.254112] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.254400] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d84003fd-35cf-44ab-8461-026717f4ceae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.274930] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.477138] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.477138] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.477138] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Deleting the datastore file [datastore1] a5f50ca4-4648-4f33-a6d3-18cfc4fd3441 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.477469] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52fde932-81e4-47a3-acda-e54fcbda7027 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.483933] env[62816]: DEBUG oslo_vmware.api [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for the task: (returnval){ [ 1683.483933] env[62816]: value = "task-1788751" [ 1683.483933] env[62816]: _type = "Task" [ 1683.483933] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.492069] env[62816]: DEBUG oslo_vmware.api [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788751, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.551163] env[62816]: DEBUG oslo_vmware.api [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1788745, 'name': PowerOnVM_Task, 'duration_secs': 0.735357} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.551392] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1683.551592] env[62816]: INFO nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Took 9.07 seconds to spawn the instance on the hypervisor. [ 1683.551763] env[62816]: DEBUG nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1683.552556] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dd4c28-6798-4110-8212-33b10e37ebfc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.898892] env[62816]: DEBUG nova.network.neutron [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Successfully updated port: 479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1683.949050] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b263841c-1015-4b53-a37e-de50605ef6e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.960978] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83323782-e894-4f76-952f-886d540f3c63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.009578] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68d56833-79a1-4a58-a491-4be14e54a0e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.017265] env[62816]: DEBUG oslo_vmware.api [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Task: {'id': task-1788751, 'name': 
DeleteDatastoreFile_Task, 'duration_secs': 0.165964} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.019868] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1684.020074] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1684.020315] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1684.020430] env[62816]: INFO nova.compute.manager [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1684.020670] env[62816]: DEBUG oslo.service.loopingcall [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1684.023352] env[62816]: DEBUG nova.compute.manager [-] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1684.023440] env[62816]: DEBUG nova.network.neutron [-] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1684.025962] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2522c3e-53ab-49c5-8cb5-c56857e17345 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.040881] env[62816]: DEBUG nova.compute.provider_tree [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1684.072817] env[62816]: INFO nova.compute.manager [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Took 27.67 seconds to build instance. 
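The entries above and below follow oslo.vmware's asynchronous task pattern: the vmwareapi driver invokes a vCenter task method (PowerOnVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task), receives a Task handle back (the "Waiting for the task: (returnval){ ... }" blocks), and wait_for_task() then polls it via _poll_task, which is what produces the "progress is N%" and "completed successfully ... duration_secs" lines. The following is only a minimal sketch of that pattern using oslo.vmware's public session API; the host, credentials, retry/poll values and the vm_ref managed-object reference are placeholders, and this is not the Nova driver code that emitted these entries.

    from oslo_vmware import api as vmware_api

    def power_off_vm(host, user, password, vm_ref):
        # Placeholder session setup; retry count and poll interval control how
        # often the pending task is re-polled (the _poll_task lines in the log).
        session = vmware_api.VMwareAPISession(
            host, user, password,
            api_retry_count=10,
            task_poll_interval=0.5,
        )
        try:
            # Start the asynchronous vCenter task and get its Task handle back.
            task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
            # Block until the task reaches a terminal state; success returns the
            # task info, an error state raises an oslo.vmware exception.
            return session.wait_for_task(task)
        finally:
            session.logout()

The poll interval trades responsiveness against load on vCenter: the sub-second durations logged here (e.g. 0.263761s for a PowerOffVM_Task) mean most tasks complete within one or two polls.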
[ 1684.111508] env[62816]: DEBUG nova.network.neutron [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updated VIF entry in instance network info cache for port 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1684.111882] env[62816]: DEBUG nova.network.neutron [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updating instance_info_cache with network_info: [{"id": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "address": "fa:16:3e:70:bf:cc", "network": {"id": "603f3279-8e01-4266-9f62-9010696b0166", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-751649207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b396c0ce6aa41c386d0a1f57a155fd8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bef2124-4f", "ovs_interfaceid": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.235127] env[62816]: DEBUG nova.compute.manager [req-13420fe3-dcc3-4634-a8c2-f1207d0caa08 req-e2613cc7-e8cf-4599-84da-49204875b7dc service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Received event network-vif-plugged-479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1684.235765] env[62816]: DEBUG oslo_concurrency.lockutils [req-13420fe3-dcc3-4634-a8c2-f1207d0caa08 req-e2613cc7-e8cf-4599-84da-49204875b7dc service nova] Acquiring lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.236083] env[62816]: DEBUG oslo_concurrency.lockutils [req-13420fe3-dcc3-4634-a8c2-f1207d0caa08 req-e2613cc7-e8cf-4599-84da-49204875b7dc service nova] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.236413] env[62816]: DEBUG oslo_concurrency.lockutils [req-13420fe3-dcc3-4634-a8c2-f1207d0caa08 req-e2613cc7-e8cf-4599-84da-49204875b7dc service nova] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.236532] env[62816]: DEBUG nova.compute.manager [req-13420fe3-dcc3-4634-a8c2-f1207d0caa08 req-e2613cc7-e8cf-4599-84da-49204875b7dc service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] No waiting events found dispatching network-vif-plugged-479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1684.236717] env[62816]: WARNING nova.compute.manager [req-13420fe3-dcc3-4634-a8c2-f1207d0caa08 req-e2613cc7-e8cf-4599-84da-49204875b7dc service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Received unexpected event network-vif-plugged-479053d9-125e-49d2-94b6-1c48422ea761 for instance with vm_state building and task_state spawning. [ 1684.402094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "refresh_cache-3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.402231] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "refresh_cache-3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.402403] env[62816]: DEBUG nova.network.neutron [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1684.544026] env[62816]: DEBUG nova.scheduler.client.report [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1684.575614] env[62816]: DEBUG oslo_concurrency.lockutils [None req-07923d35-9b7f-4889-b302-4f778e3653fb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.185s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.615615] env[62816]: DEBUG oslo_concurrency.lockutils [req-28911a8f-bb80-4dd9-a9a8-ed86a948d16a req-8c0188ab-a456-4417-9f3e-aa2fb367a575 service nova] Releasing lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.839483] env[62816]: DEBUG nova.network.neutron [-] [instance: 
a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.971019] env[62816]: DEBUG nova.network.neutron [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1685.052076] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.052732] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1685.056536] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.859s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.058830] env[62816]: INFO nova.compute.claims [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1685.198066] env[62816]: DEBUG nova.network.neutron [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Updating instance_info_cache with network_info: [{"id": "479053d9-125e-49d2-94b6-1c48422ea761", "address": "fa:16:3e:fa:a2:c0", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap479053d9-12", "ovs_interfaceid": "479053d9-125e-49d2-94b6-1c48422ea761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.227336] env[62816]: DEBUG nova.compute.manager [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Received event network-changed-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1685.227336] env[62816]: DEBUG nova.compute.manager [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Refreshing instance network info cache due to event network-changed-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1685.227336] env[62816]: DEBUG oslo_concurrency.lockutils [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] Acquiring lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1685.227336] env[62816]: DEBUG oslo_concurrency.lockutils [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] Acquired lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1685.227336] env[62816]: DEBUG nova.network.neutron [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Refreshing network info cache for port 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1685.347125] env[62816]: INFO nova.compute.manager [-] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Took 1.32 seconds to deallocate network for instance. [ 1685.566115] env[62816]: DEBUG nova.compute.utils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1685.576764] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1685.577433] env[62816]: DEBUG nova.network.neutron [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1685.636756] env[62816]: DEBUG nova.policy [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2d03b7ebab74997be1049ceb87d97b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35d54c38b25c446cb8f44f2d7f419db0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1685.701610] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "refresh_cache-3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.701954] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Instance network_info: |[{"id": "479053d9-125e-49d2-94b6-1c48422ea761", "address": "fa:16:3e:fa:a2:c0", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap479053d9-12", "ovs_interfaceid": "479053d9-125e-49d2-94b6-1c48422ea761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1685.702467] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:a2:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'479053d9-125e-49d2-94b6-1c48422ea761', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1685.718724] env[62816]: DEBUG oslo.service.loopingcall [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1685.719391] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1685.719634] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7b8a9c3-d1df-433d-9851-43025e6e1cc7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.744759] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1685.744759] env[62816]: value = "task-1788752" [ 1685.744759] env[62816]: _type = "Task" [ 1685.744759] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.758024] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788752, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.856501] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.069602] env[62816]: DEBUG nova.network.neutron [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Successfully created port: 9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1686.077889] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1686.255111] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788752, 'name': CreateVM_Task, 'duration_secs': 0.495347} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.255111] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1686.255797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.255952] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.256374] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1686.256555] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3e6307-2785-4d25-bce8-a5316aa010f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.265452] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1686.265452] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52bf17df-d207-28f9-b3e0-1a53a7480a9a" [ 1686.265452] env[62816]: _type = "Task" [ 1686.265452] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.276740] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bf17df-d207-28f9-b3e0-1a53a7480a9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.277813] env[62816]: DEBUG nova.network.neutron [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updated VIF entry in instance network info cache for port 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1686.278194] env[62816]: DEBUG nova.network.neutron [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updating instance_info_cache with network_info: [{"id": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "address": "fa:16:3e:70:bf:cc", "network": {"id": "603f3279-8e01-4266-9f62-9010696b0166", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-751649207-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9b396c0ce6aa41c386d0a1f57a155fd8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3f695b6-65bc-45cc-a61d-3c38a14e5c0c", "external-id": "nsx-vlan-transportzone-559", "segmentation_id": 559, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9bef2124-4f", "ovs_interfaceid": "9bef2124-4f7f-4111-aa0d-0c2d63ad71c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1686.314434] env[62816]: DEBUG nova.compute.manager [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Received event network-changed-479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1686.314434] env[62816]: DEBUG nova.compute.manager [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Refreshing instance network info cache due to event network-changed-479053d9-125e-49d2-94b6-1c48422ea761. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1686.314666] env[62816]: DEBUG oslo_concurrency.lockutils [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] Acquiring lock "refresh_cache-3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.314768] env[62816]: DEBUG oslo_concurrency.lockutils [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] Acquired lock "refresh_cache-3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.314929] env[62816]: DEBUG nova.network.neutron [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Refreshing network info cache for port 479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1686.325790] env[62816]: INFO nova.compute.manager [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Rescuing [ 1686.326083] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.327020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.327020] env[62816]: DEBUG nova.network.neutron [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1686.442412] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074f9a41-7843-4a76-93a5-da4bf2f3f18b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.450872] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35485969-0439-4197-9258-920a1ae8c21b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.486093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60449af0-5038-41e8-a120-f4bd4115a4d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.493623] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751b9239-6b9c-4b5f-b4f7-273481621541 {{(pid=62816) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.508517] env[62816]: DEBUG nova.compute.provider_tree [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1686.777209] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bf17df-d207-28f9-b3e0-1a53a7480a9a, 'name': SearchDatastore_Task, 'duration_secs': 0.03996} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.777209] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.777329] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1686.777479] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1686.778033] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1686.778033] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1686.778156] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4a0e980-0068-408a-9df9-14d7061b6867 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.780631] env[62816]: DEBUG oslo_concurrency.lockutils [req-39abd1de-aba9-4666-856c-c9716fcaf277 req-499e987e-2b95-4b50-aa3c-50557847e279 service nova] Releasing lock "refresh_cache-8ccce660-6c41-412d-99ac-65ca7915d728" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.789029] env[62816]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1686.789029] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1686.790028] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf24d6fc-5a94-49dd-8e13-c2590f6308bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.795313] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1686.795313] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520bf409-8610-6c3c-0110-0ca6c7c757c1" [ 1686.795313] env[62816]: _type = "Task" [ 1686.795313] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.803441] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520bf409-8610-6c3c-0110-0ca6c7c757c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.013503] env[62816]: DEBUG nova.scheduler.client.report [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1687.094960] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1687.125475] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1687.125730] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1687.125877] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1687.126116] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1687.126275] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1687.126430] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1687.126628] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1687.126782] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1687.126974] env[62816]: DEBUG nova.virt.hardware [None 
req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1687.127140] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1687.127344] env[62816]: DEBUG nova.virt.hardware [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1687.128305] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244539d6-952d-452b-a24b-4bf174fe427e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.138148] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbec2f72-f0dc-4215-aa97-194bc2baebd8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.241907] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "8ccce660-6c41-412d-99ac-65ca7915d728" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.242623] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "8ccce660-6c41-412d-99ac-65ca7915d728" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.242889] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "8ccce660-6c41-412d-99ac-65ca7915d728-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.243153] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "8ccce660-6c41-412d-99ac-65ca7915d728-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.247285] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "8ccce660-6c41-412d-99ac-65ca7915d728-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.004s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.252309] env[62816]: INFO nova.compute.manager [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Terminating instance [ 1687.258293] env[62816]: DEBUG nova.compute.manager [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1687.259023] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1687.260399] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f7adf2-48b9-4698-82d8-51edcb59056a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.271154] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1687.272160] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca63a39d-271e-4d9e-aff4-18de3b0e6d99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.279559] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1687.279559] env[62816]: value = "task-1788754" [ 1687.279559] env[62816]: _type = "Task" [ 1687.279559] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.287503] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788754, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.304486] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520bf409-8610-6c3c-0110-0ca6c7c757c1, 'name': SearchDatastore_Task, 'duration_secs': 0.013359} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.305353] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ea22891-eb81-4d01-897d-294a5c41132d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.312772] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1687.312772] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237c1ae-4b7b-ab58-328d-72f494eae2d6" [ 1687.312772] env[62816]: _type = "Task" [ 1687.312772] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.324510] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5237c1ae-4b7b-ab58-328d-72f494eae2d6, 'name': SearchDatastore_Task, 'duration_secs': 0.009809} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.324794] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.325070] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb/3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1687.325318] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2af222f2-4a33-4beb-8026-721606a14c54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.337227] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1687.337227] env[62816]: value = "task-1788755" [ 1687.337227] env[62816]: _type = "Task" [ 1687.337227] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.344769] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.358847] env[62816]: DEBUG nova.network.neutron [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updating instance_info_cache with network_info: [{"id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "address": "fa:16:3e:84:b5:d3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5a67df-81", "ovs_interfaceid": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.362161] env[62816]: DEBUG nova.network.neutron [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Updated VIF entry in instance network info cache for port 479053d9-125e-49d2-94b6-1c48422ea761. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1687.362537] env[62816]: DEBUG nova.network.neutron [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Updating instance_info_cache with network_info: [{"id": "479053d9-125e-49d2-94b6-1c48422ea761", "address": "fa:16:3e:fa:a2:c0", "network": {"id": "97ba9fa7-300a-497f-aef3-65932440f795", "bridge": "br-int", "label": "tempest-ImagesTestJSON-2146608026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2d830983a3c14168b8f0b67478f27589", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap479053d9-12", "ovs_interfaceid": "479053d9-125e-49d2-94b6-1c48422ea761", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.521412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.465s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.522367] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1687.525764] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.029s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.525764] env[62816]: DEBUG nova.objects.instance [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lazy-loading 'resources' on Instance uuid 0a1a8539-940a-4a17-9826-82736be41892 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.791812] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788754, 'name': PowerOffVM_Task} progress is 0%. 
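The two instance_info_cache updates above store each port as a VIF dictionary: an id and MAC address, a nested network with its subnets, fixed IPs and DHCP server, plus the OVS binding details and devname. A minimal sketch of pulling the fixed IPs back out of that structure; the field names and values are copied from the c66fa160 entry above, trimmed to what the snippet actually uses:

```python
# Shape of nova's cached network_info, reduced to the fields read below.
# Values are taken from the instance_info_cache record logged above.
network_info = [{
    "id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b",
    "address": "fa:16:3e:84:b5:d3",
    "network": {
        "id": "c4f8816b-f710-4808-8345-cab5c5344057",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
    "devname": "tap3b5a67df-81",
}]

# Collect every fixed IP across all VIFs and subnets.
fixed_ips = [ip["address"]
             for vif in network_info
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(fixed_ips)  # ['192.168.128.4']
```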
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.850230] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488252} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.850230] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb/3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1687.850230] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1687.850230] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-799d0a47-b78d-48e1-b973-d3a7e292ef4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.862809] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.865183] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1687.865183] env[62816]: value = "task-1788757" [ 1687.865183] env[62816]: _type = "Task" [ 1687.865183] env[62816]: } to complete. 
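The SearchDatastore_Task, CopyVirtualDisk_Task and ExtendVirtualDisk_Task records above all follow the same oslo.vmware pattern: invoke an asynchronous *_Task method through the session, then poll the returned task until it finishes (the "progress is N%" and "completed successfully" lines come from that polling). A minimal sketch of the pattern against oslo.vmware's session API; the host, credentials, datastore paths and the datacenter reference are placeholders, and error handling is omitted:

```python
from oslo_vmware import api as vmware_api


def make_session(host, user, password):
    # Endpoint and credentials are placeholders; in a real deployment they
    # come from nova.conf. The poll interval drives the "progress is N%"
    # lines seen above.
    return vmware_api.VMwareAPISession(
        host, user, password, api_retry_count=10, task_poll_interval=0.5)


def copy_disk(session, dc_ref, src_path, dst_path):
    # `dc_ref` is the datacenter managed-object reference owning both
    # datastore paths (looked up elsewhere, e.g. via the property collector).
    vdm = session.vim.service_content.virtualDiskManager
    # CopyVirtualDisk_Task returns a task moref immediately; the copy runs
    # asynchronously on the vCenter side.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', vdm,
                              sourceName=src_path, sourceDatacenter=dc_ref,
                              destName=dst_path, destDatacenter=dc_ref)
    # wait_for_task() polls the task until it reaches the "success" or
    # "error" state and returns the task info.
    return session.wait_for_task(task)
```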
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.868144] env[62816]: DEBUG oslo_concurrency.lockutils [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] Releasing lock "refresh_cache-3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.868144] env[62816]: DEBUG nova.compute.manager [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Received event network-vif-deleted-3dd38577-9c08-47e5-afe3-ca2e34af1424 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1687.868144] env[62816]: DEBUG nova.compute.manager [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Received event network-changed-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1687.868144] env[62816]: DEBUG nova.compute.manager [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Refreshing instance network info cache due to event network-changed-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1687.868144] env[62816]: DEBUG oslo_concurrency.lockutils [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] Acquiring lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.868144] env[62816]: DEBUG oslo_concurrency.lockutils [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] Acquired lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.868144] env[62816]: DEBUG nova.network.neutron [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Refreshing network info cache for port 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1687.874412] env[62816]: DEBUG nova.network.neutron [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Successfully updated port: 9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.880039] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788757, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.031869] env[62816]: DEBUG nova.compute.utils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1688.033873] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1688.034052] env[62816]: DEBUG nova.network.neutron [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1688.079923] env[62816]: DEBUG nova.policy [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0a2129bc83a45d695730796b55f1caf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72d49b085afa4df99700ea4e15e9c87e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1688.236950] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.237261] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.262089] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "9ab4e631-5b31-4b37-9b49-4f0423286752" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.262390] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock 
"9ab4e631-5b31-4b37-9b49-4f0423286752" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.290737] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788754, 'name': PowerOffVM_Task, 'duration_secs': 0.789738} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.292221] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.292406] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1688.297523] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a41d5e2-4d78-4915-9cbf-998128e7ede6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.299434] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "2583e2ba-8904-420c-a417-d6af71bfa9ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.299655] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.347212] env[62816]: DEBUG nova.compute.manager [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Received event network-vif-plugged-9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1688.347475] env[62816]: DEBUG oslo_concurrency.lockutils [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] Acquiring lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.347689] env[62816]: DEBUG oslo_concurrency.lockutils 
[req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.347854] env[62816]: DEBUG oslo_concurrency.lockutils [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.348031] env[62816]: DEBUG nova.compute.manager [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] No waiting events found dispatching network-vif-plugged-9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1688.348221] env[62816]: WARNING nova.compute.manager [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Received unexpected event network-vif-plugged-9298a5b6-bb53-483a-aedc-c756f3d8484d for instance with vm_state building and task_state spawning. [ 1688.348405] env[62816]: DEBUG nova.compute.manager [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Received event network-changed-9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1688.348560] env[62816]: DEBUG nova.compute.manager [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Refreshing instance network info cache due to event network-changed-9298a5b6-bb53-483a-aedc-c756f3d8484d. 
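The repeated "Acquiring lock ...", "Lock ... acquired ... waited N s" and "Lock ... released ... held N s" lines above (compute_resources, refresh_cache-<uuid>, the per-instance -events lock) are emitted by oslo.concurrency's lockutils wrappers around each critical section. A minimal sketch of the two usual forms, decorator and context manager; the lock names are simply the ones visible in the log and the guarded bodies are placeholders:

```python
from oslo_concurrency import lockutils


# Decorator form: serialize every call to the function on a named semaphore.
@lockutils.synchronized('compute_resources')
def instance_claim():
    pass  # placeholder for the resource-tracker work guarded above


# Context-manager form: guard a single block; the waited/held durations in
# the log are measured around this acquire and release.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # placeholder for the network info cache refresh
```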
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1688.348738] env[62816]: DEBUG oslo_concurrency.lockutils [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] Acquiring lock "refresh_cache-f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.348873] env[62816]: DEBUG oslo_concurrency.lockutils [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] Acquired lock "refresh_cache-f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.349288] env[62816]: DEBUG nova.network.neutron [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Refreshing network info cache for port 9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1688.352429] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a98a4a-aabb-48f1-b4df-c18503adae74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.362826] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9389e451-be22-4d1d-aa63-61d9d21545e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.401568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "refresh_cache-f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.403970] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f09344-637a-49c6-841d-14507bd1a3b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.408806] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1688.409524] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1688.409524] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Deleting the datastore file [datastore1] 8ccce660-6c41-412d-99ac-65ca7915d728 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1688.409803] env[62816]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1688.414410] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-baaeadc3-6899-48ce-9f3a-b37652733b3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.419021] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1af6d705-262d-4c2d-917e-7295c84cf8e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.419021] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788757, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088088} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.419021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1688.420072] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ef4f23-0e95-4c6e-a617-f1fc6defe6af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.428766] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a6f179-75ad-42b4-8d2f-7373149675c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.433561] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1688.433561] env[62816]: value = "task-1788759" [ 1688.433561] env[62816]: _type = "Task" [ 1688.433561] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.433989] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for the task: (returnval){ [ 1688.433989] env[62816]: value = "task-1788760" [ 1688.433989] env[62816]: _type = "Task" [ 1688.433989] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.453060] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb/3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1688.463964] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97d2ab96-6e9e-4498-b5bf-5f9a07ba9bd0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.487402] env[62816]: DEBUG nova.compute.provider_tree [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.494776] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788759, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.498546] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.498854] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1688.498854] env[62816]: value = "task-1788761" [ 1688.498854] env[62816]: _type = "Task" [ 1688.498854] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.503050] env[62816]: DEBUG nova.network.neutron [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Successfully created port: c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1688.511060] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788761, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.536649] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1688.704622] env[62816]: DEBUG nova.network.neutron [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updated VIF entry in instance network info cache for port 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1688.704997] env[62816]: DEBUG nova.network.neutron [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating instance_info_cache with network_info: [{"id": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "address": "fa:16:3e:55:1f:0b", "network": {"id": "c4822de1-74e3-43d6-aa09-56d0d753dd5a", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-54756075-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e4a92683e3f457bb157966a92b48577", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e238ac23-819b-452f-9015-52922e45efd3", "external-id": "nsx-vlan-transportzone-127", "segmentation_id": 127, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cd3caf0-0f", "ovs_interfaceid": "2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.745524] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1688.771250] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1688.805061] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1688.889295] env[62816]: DEBUG nova.network.neutron [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1688.957012] env[62816]: DEBUG oslo_vmware.api [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Task: {'id': task-1788760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152292} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.960368] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1688.960368] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1688.960504] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1688.961062] env[62816]: INFO nova.compute.manager [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1688.961062] env[62816]: DEBUG oslo.service.loopingcall [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1688.961378] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788759, 'name': PowerOffVM_Task, 'duration_secs': 0.458861} completed successfully. 
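The "Waiting for function ..._deallocate_network_with_retries to return" record above comes from oslo.service's loopingcall machinery, which Nova uses to re-run an operation until it signals completion. A minimal sketch of the general pattern with FixedIntervalLoopingCall; the polled function here is a stand-in, not Nova's actual retry helper:

```python
from oslo_service import loopingcall

attempts = {'count': 0}


def _poll():
    # Stand-in for the retried operation; raising LoopingCallDone stops the
    # loop and hands its retvalue back to the waiter.
    attempts['count'] += 1
    if attempts['count'] >= 3:
        raise loopingcall.LoopingCallDone(retvalue='done')


timer = loopingcall.FixedIntervalLoopingCall(_poll)
# start() runs _poll every `interval` seconds; wait() blocks until
# LoopingCallDone is raised and returns its retvalue.
result = timer.start(interval=0.1).wait()
print(result)  # 'done'
```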
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.961676] env[62816]: DEBUG nova.compute.manager [-] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1688.961827] env[62816]: DEBUG nova.network.neutron [-] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1688.963482] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.964253] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55a4ec68-928e-4fa7-ac11-8330a65df3e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.985106] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3850ece2-614e-446d-b314-18ca2e327497 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.991034] env[62816]: DEBUG nova.scheduler.client.report [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1688.997710] env[62816]: DEBUG nova.network.neutron [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.010833] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788761, 'name': ReconfigVM_Task} progress is 14%. 
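The "Inventory has not changed" report above carries the numbers placement uses to bound scheduling: for each resource class the usable capacity is (total - reserved) * allocation_ratio, while min_unit, max_unit and step_size constrain individual requests. A quick check of the figures shown (VCPU 48/0/4.0, MEMORY_MB 196590/512/1.0, DISK_GB 400/0/1.0):

```python
def capacity(total, reserved, allocation_ratio):
    # Effective capacity placement exposes for a resource class.
    return (total - reserved) * allocation_ratio


print(capacity(48, 0, 4.0))        # 192.0 schedulable VCPUs
print(capacity(196590, 512, 1.0))  # 196078.0 MB of schedulable RAM
print(capacity(400, 0, 1.0))       # 400.0 GB of schedulable disk
```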
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.030363] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1689.030648] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6619989b-e794-4cf4-b020-335ec7fc6ed5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.039265] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1689.039265] env[62816]: value = "task-1788762" [ 1689.039265] env[62816]: _type = "Task" [ 1689.039265] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.055351] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1689.055351] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1689.055483] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.055681] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.055895] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1689.056159] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d3d10e8-cad1-41ae-8038-82fba670c1a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1689.065272] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1689.065455] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1689.066186] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ec7c6ae-067e-4633-bb2e-fccce868381d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.071758] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1689.071758] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52507e3e-b7c4-5779-656a-35075f0089e9" [ 1689.071758] env[62816]: _type = "Task" [ 1689.071758] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.080746] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52507e3e-b7c4-5779-656a-35075f0089e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.210133] env[62816]: DEBUG oslo_concurrency.lockutils [req-f9abd1c0-dd3c-4d3f-a441-818acb973fc5 req-7749e945-4c7d-4a0f-9218-e464e384f249 service nova] Releasing lock "refresh_cache-d03ed540-5c20-4bcb-ac7e-eec8c09e4451" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.219165] env[62816]: DEBUG nova.compute.manager [req-acc62184-0e78-4a95-8e7c-3681e474aadc req-b6ba3ada-3c4e-4bf3-be1b-6abb599857f4 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Received event network-vif-deleted-9bef2124-4f7f-4111-aa0d-0c2d63ad71c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1689.219426] env[62816]: INFO nova.compute.manager [req-acc62184-0e78-4a95-8e7c-3681e474aadc req-b6ba3ada-3c4e-4bf3-be1b-6abb599857f4 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Neutron deleted interface 9bef2124-4f7f-4111-aa0d-0c2d63ad71c6; detaching it from the instance and deleting it from the info cache [ 1689.219682] env[62816]: DEBUG nova.network.neutron [req-acc62184-0e78-4a95-8e7c-3681e474aadc req-b6ba3ada-3c4e-4bf3-be1b-6abb599857f4 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.266139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.288195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.323839] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.496389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.971s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.498831] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.224s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.499095] env[62816]: DEBUG nova.objects.instance [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lazy-loading 'resources' on Instance uuid 679cd9a3-2ed6-451f-b934-ba7738913959 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.501031] env[62816]: DEBUG oslo_concurrency.lockutils [req-08b934b5-2b07-42bc-aa0a-1848e24743a0 req-18e3931b-4041-4f70-9e8a-96e6c75a47a0 service nova] Releasing lock "refresh_cache-f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.501177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquired lock "refresh_cache-f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.501406] env[62816]: DEBUG nova.network.neutron [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1689.515934] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788761, 'name': ReconfigVM_Task, 'duration_secs': 0.920177} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.516801] env[62816]: INFO nova.scheduler.client.report [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted allocations for instance 0a1a8539-940a-4a17-9826-82736be41892 [ 1689.517824] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb/3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1689.518686] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1af19879-8005-4414-a379-c69ea79c4e31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.529156] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1689.529156] env[62816]: value = "task-1788763" [ 1689.529156] env[62816]: _type = "Task" [ 1689.529156] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.539173] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788763, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.550940] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1689.580268] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1689.580533] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1689.580692] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1689.580867] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1689.581018] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1689.581212] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1689.581455] env[62816]: DEBUG 
nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1689.581621] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1689.581788] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1689.582045] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1689.582237] env[62816]: DEBUG nova.virt.hardware [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1689.583072] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1b8471-e86d-49c3-a22c-1ddd50dc0682 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.591049] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52507e3e-b7c4-5779-656a-35075f0089e9, 'name': SearchDatastore_Task, 'duration_secs': 0.0096} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.592575] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13002945-7d33-4052-a8b1-951a770cd888 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.598931] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d9aa31-77e5-40f9-8393-c59fde5a07eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.604976] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1689.604976] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52792142-44f4-5d0f-2b88-2bd49562ab7e" [ 1689.604976] env[62816]: _type = "Task" [ 1689.604976] env[62816]: } to complete. 
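The nova.virt.hardware lines above walk the CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits set, every (sockets, cores, threads) factorization of the vCPU count that fits the 65536 ceilings is a candidate, and 1:1:1 is the only option for a single vCPU. A small illustrative enumeration of that idea; this is a sketch of the concept, not Nova's actual implementation:

```python
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    # Enumerate (sockets, cores, threads) triples whose product equals the
    # vCPU count and which respect the per-dimension ceilings.
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos


print(possible_topologies(1))  # [(1, 1, 1)]
print(possible_topologies(4))
# [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), (2, 2, 1), (4, 1, 1)]
```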
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.625889] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52792142-44f4-5d0f-2b88-2bd49562ab7e, 'name': SearchDatastore_Task, 'duration_secs': 0.011183} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.626180] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.626499] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. {{(pid=62816) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1689.626738] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3f5586b8-135b-4814-8116-9413d7afb99c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.635255] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1689.635255] env[62816]: value = "task-1788764" [ 1689.635255] env[62816]: _type = "Task" [ 1689.635255] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.644620] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788764, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.694742] env[62816]: DEBUG nova.network.neutron [-] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.722245] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e91922ed-d397-411e-86b0-bf8c68679c87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.734898] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91716de5-16c1-44ba-be9e-d9ea09c55915 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.773515] env[62816]: DEBUG nova.compute.manager [req-acc62184-0e78-4a95-8e7c-3681e474aadc req-b6ba3ada-3c4e-4bf3-be1b-6abb599857f4 service nova] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Detach interface failed, port_id=9bef2124-4f7f-4111-aa0d-0c2d63ad71c6, reason: Instance 8ccce660-6c41-412d-99ac-65ca7915d728 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1690.027641] env[62816]: DEBUG oslo_concurrency.lockutils [None req-870250a0-49af-41fa-8b9e-6cb599a9e7d0 tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0a1a8539-940a-4a17-9826-82736be41892" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.183s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.048073] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788763, 'name': Rename_Task, 'duration_secs': 0.21554} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.048073] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1690.048073] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2794a67e-7666-4e4a-8a15-fac814b68ef8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.064141] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1690.064141] env[62816]: value = "task-1788766" [ 1690.064141] env[62816]: _type = "Task" [ 1690.064141] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.075309] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788766, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.095387] env[62816]: DEBUG nova.network.neutron [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1690.146353] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788764, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.184338] env[62816]: DEBUG nova.network.neutron [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Successfully updated port: c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1690.197537] env[62816]: INFO nova.compute.manager [-] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Took 1.24 seconds to deallocate network for instance. [ 1690.294679] env[62816]: DEBUG nova.network.neutron [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Updating instance_info_cache with network_info: [{"id": "9298a5b6-bb53-483a-aedc-c756f3d8484d", "address": "fa:16:3e:cb:48:25", "network": {"id": "1a3e96b9-ef8b-4224-9b97-05dd53f168c5", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-911355462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35d54c38b25c446cb8f44f2d7f419db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9298a5b6-bb", "ovs_interfaceid": "9298a5b6-bb53-483a-aedc-c756f3d8484d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.336094] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2125d0d3-5930-492e-99e9-90f3f125c8fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.344046] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6f8a61-6a21-4028-abf2-e6743d6c64b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.378317] env[62816]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df43387-80da-4362-85da-2fad002cbc47 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.381873] env[62816]: DEBUG nova.compute.manager [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Received event network-vif-plugged-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1690.382096] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] Acquiring lock "4ab07a21-2685-42bc-af13-b95473993d6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.382316] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] Lock "4ab07a21-2685-42bc-af13-b95473993d6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.382497] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] Lock "4ab07a21-2685-42bc-af13-b95473993d6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.382677] env[62816]: DEBUG nova.compute.manager [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] No waiting events found dispatching network-vif-plugged-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1690.382847] env[62816]: WARNING nova.compute.manager [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Received unexpected event network-vif-plugged-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd for instance with vm_state building and task_state spawning. [ 1690.383020] env[62816]: DEBUG nova.compute.manager [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Received event network-changed-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1690.383185] env[62816]: DEBUG nova.compute.manager [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Refreshing instance network info cache due to event network-changed-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd. 
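The req-e1d2f6b2 entries above serialize event handling with per-instance lock names of the form "<uuid>-events" via oslo.concurrency. A rough sketch of that pattern follows; the handler and event payload are hypothetical, and only lockutils.lock() is taken from the real library.

    # Sketch: serialize per-instance event handling with a named lock, as in the
    # "<uuid>-events" acquire/release entries above.
    from oslo_concurrency import lockutils

    def handle_instance_event(instance_uuid, event):
        with lockutils.lock(f"{instance_uuid}-events"):
            # Only one thread at a time touches this instance's pending events.
            print(f"dispatching {event} for {instance_uuid}")

    handle_instance_event("4ab07a21-2685-42bc-af13-b95473993d6f",
                          "network-vif-plugged-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd")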
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1690.383366] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] Acquiring lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.383498] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] Acquired lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.383649] env[62816]: DEBUG nova.network.neutron [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Refreshing network info cache for port c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1690.391263] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac575cf0-0314-4f63-9bab-3092a6362b32 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.406439] env[62816]: DEBUG nova.compute.provider_tree [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1690.548485] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "0e0261fe-4376-487c-9d54-c4f37577409c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.548746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0e0261fe-4376-487c-9d54-c4f37577409c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.548955] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "0e0261fe-4376-487c-9d54-c4f37577409c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.549155] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0e0261fe-4376-487c-9d54-c4f37577409c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.549328] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0e0261fe-4376-487c-9d54-c4f37577409c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.551628] env[62816]: INFO nova.compute.manager [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Terminating instance [ 1690.553373] env[62816]: DEBUG nova.compute.manager [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1690.553564] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1690.554423] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9160f876-6009-4ccf-b0b0-787ba27ccd7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.562879] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1690.563125] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f661f49-7248-4448-919e-f08849280a87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.572749] env[62816]: DEBUG oslo_vmware.api [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788766, 'name': PowerOnVM_Task, 'duration_secs': 0.475261} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.573914] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1690.574131] env[62816]: INFO nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Took 7.82 seconds to spawn the instance on the hypervisor. 
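Tasks such as PowerOffVM_Task and PowerOnVM_Task above are driven by oslo.vmware's wait_for_task/_poll_task, which keep polling progress until the task reports success. The loop below is a simplified stand-in: poll_fn, the state strings, and the fixed sleep interval are assumptions for this sketch, while the real code is built on oslo.service loopingcall and richer task info objects.

    # Simplified poll-until-done loop in the spirit of wait_for_task/_poll_task.
    # poll_fn returns a dict like {'state': 'running', 'progress': 40}; the field
    # names and states are assumptions for the sketch.
    import time

    def wait_for_task(poll_fn, interval=0.5, timeout=300.0):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = poll_fn()
            if info["state"] == "success":
                return info                      # e.g. includes 'duration_secs'
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            time.sleep(interval)                 # wait before polling again
        raise TimeoutError("task did not complete in time")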
[ 1690.574309] env[62816]: DEBUG nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1690.574609] env[62816]: DEBUG oslo_vmware.api [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1690.574609] env[62816]: value = "task-1788767" [ 1690.574609] env[62816]: _type = "Task" [ 1690.574609] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.575303] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6040407d-bfbd-4bc0-82c6-08d1cc1e396d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.585960] env[62816]: DEBUG oslo_vmware.api [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.647457] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788764, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.687326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.708623] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.799206] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Releasing lock "refresh_cache-f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1690.799646] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Instance network_info: |[{"id": "9298a5b6-bb53-483a-aedc-c756f3d8484d", "address": "fa:16:3e:cb:48:25", "network": {"id": "1a3e96b9-ef8b-4224-9b97-05dd53f168c5", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-911355462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35d54c38b25c446cb8f44f2d7f419db0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9298a5b6-bb", "ovs_interfaceid": "9298a5b6-bb53-483a-aedc-c756f3d8484d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1690.800120] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:48:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9298a5b6-bb53-483a-aedc-c756f3d8484d', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1690.807996] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Creating folder: Project (35d54c38b25c446cb8f44f2d7f419db0). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1690.808339] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f16856bd-fb86-484a-be64-9155c1684fa1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.838351] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Created folder: Project (35d54c38b25c446cb8f44f2d7f419db0) in parent group-v370905. [ 1690.838584] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Creating folder: Instances. Parent ref: group-v371114. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1690.838850] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3f9a08e-8554-4ce3-abcf-b48c2b3ecac7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.848672] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Created folder: Instances in parent group-v371114. 
[ 1690.848914] env[62816]: DEBUG oslo.service.loopingcall [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1690.849121] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1690.849373] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-994e312b-a4a4-4c2c-9ccf-44e8a4e5bf47 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.870132] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1690.870132] env[62816]: value = "task-1788770" [ 1690.870132] env[62816]: _type = "Task" [ 1690.870132] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.881971] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788770, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.909596] env[62816]: DEBUG nova.scheduler.client.report [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1690.918260] env[62816]: DEBUG nova.network.neutron [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1691.003090] env[62816]: DEBUG nova.network.neutron [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.088449] env[62816]: DEBUG oslo_vmware.api [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788767, 'name': PowerOffVM_Task, 'duration_secs': 0.464729} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.088704] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1691.089036] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1691.089146] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43a27ec2-a74a-434b-ad6a-262f521d7375 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.099314] env[62816]: INFO nova.compute.manager [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Took 21.92 seconds to build instance. [ 1691.148872] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788764, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.496996} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.149182] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. 
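The copy above takes the cached base image vmdk and lands it next to the instance as <image-id>-rescue.vmdk. A tiny sketch of how the two datastore paths relate; the helper is hypothetical, while the "[datastore] folder/file" layout and naming pattern follow the paths in the log lines.

    # Build the source (image cache) and destination (per-instance rescue) vmdk
    # paths used by the CopyVirtualDisk_Task above.
    def rescue_copy_paths(datastore, image_id, instance_uuid,
                          cache_folder="devstack-image-cache_base"):
        src = f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"
        dst = f"[{datastore}] {instance_uuid}/{image_id}-rescue.vmdk"
        return src, dst

    src, dst = rescue_copy_paths("datastore1",
                                 "844838ed-b150-482e-a0f6-dcce37470b52",
                                 "c66fa160-d4dd-429f-8751-f36cb2020ff1")
    print(src)   # [datastore1] devstack-image-cache_base/844838ed-.../844838ed-....vmdk
    print(dst)   # [datastore1] c66fa160-.../844838ed-...-rescue.vmdk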
[ 1691.150322] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db48c595-f9e5-49b5-9293-0493411abb2e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.188174] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1691.189811] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7852c77-0968-4a7c-ae9a-c8c37ce252d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.210342] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1691.210342] env[62816]: value = "task-1788772" [ 1691.210342] env[62816]: _type = "Task" [ 1691.210342] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.219149] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788772, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.248060] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1691.248060] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1691.248060] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleting the datastore file [datastore1] 0e0261fe-4376-487c-9d54-c4f37577409c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1691.248425] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd49584b-9ef1-4e58-bffa-63c61ae0d27a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.257273] env[62816]: DEBUG oslo_vmware.api [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for the task: (returnval){ [ 1691.257273] env[62816]: value = "task-1788773" [ 1691.257273] env[62816]: _type = "Task" [ 1691.257273] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.267329] env[62816]: DEBUG oslo_vmware.api [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788773, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.380664] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788770, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.414450] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.416958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.561s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.417582] env[62816]: DEBUG nova.objects.instance [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lazy-loading 'resources' on Instance uuid a5f50ca4-4648-4f33-a6d3-18cfc4fd3441 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1691.439832] env[62816]: INFO nova.scheduler.client.report [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Deleted allocations for instance 679cd9a3-2ed6-451f-b934-ba7738913959 [ 1691.507036] env[62816]: DEBUG oslo_concurrency.lockutils [req-e1d2f6b2-9fa5-4b5b-81c1-337fd63fa951 req-51daea88-c3f3-42f2-a6c9-5eefe97b4fa7 service nova] Releasing lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.507036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.507036] env[62816]: DEBUG nova.network.neutron [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1691.601197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a10d2d8b-1915-487c-821f-167113996055 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.435s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.722756] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788772, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.765592] env[62816]: DEBUG oslo_vmware.api [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Task: {'id': task-1788773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162303} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.765846] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1691.766047] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1691.766230] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1691.766403] env[62816]: INFO nova.compute.manager [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1691.766645] env[62816]: DEBUG oslo.service.loopingcall [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1691.766830] env[62816]: DEBUG nova.compute.manager [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1691.766926] env[62816]: DEBUG nova.network.neutron [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1691.881616] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788770, 'name': CreateVM_Task, 'duration_secs': 0.608758} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.881813] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1691.882517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.882691] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.883026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1691.883287] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62508f87-efc1-45f6-a27d-565200283bec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.889583] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1691.889583] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ed33e4-b91b-9656-73e9-a5dc9871f341" [ 1691.889583] env[62816]: _type = "Task" [ 1691.889583] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.898228] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ed33e4-b91b-9656-73e9-a5dc9871f341, 'name': SearchDatastore_Task} progress is 0%. 
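The req-2371a861 entries above take a lock (and an external semaphore) on the image cache path before the SearchDatastore_Task that checks whether the base vmdk is already present, so only one request has to fetch a missing image. A rough sketch of that check-under-lock pattern: datastore_file_exists and fetch_image_to_cache are stand-ins supplied by the caller, and only lockutils.lock is the real oslo API.

    # Check-then-fill the image cache under a lock named after the cached path,
    # mirroring the "Acquiring/Acquired lock [datastore1] devstack-image-cache_base/..."
    # entries above.
    from oslo_concurrency import lockutils

    def ensure_cached_image(cache_path, datastore_file_exists, fetch_image_to_cache):
        with lockutils.lock(cache_path):
            if not datastore_file_exists(cache_path):   # SearchDatastore_Task in the log
                fetch_image_to_cache(cache_path)        # only the first waiter downloads
        return cache_path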
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.949728] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3685479-a240-4ae2-b199-3e1c010ae3a4 tempest-ServersNegativeTestJSON-2121561785 tempest-ServersNegativeTestJSON-2121561785-project-member] Lock "679cd9a3-2ed6-451f-b934-ba7738913959" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.032s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.019343] env[62816]: DEBUG nova.compute.manager [req-86b6379f-b8fd-4433-96f5-0255e8c7705d req-16b98401-b1fb-4320-8254-3917d61ad95a service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Received event network-vif-deleted-b2b6a990-a634-4e68-ba4c-886b856209a5 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1692.019610] env[62816]: INFO nova.compute.manager [req-86b6379f-b8fd-4433-96f5-0255e8c7705d req-16b98401-b1fb-4320-8254-3917d61ad95a service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Neutron deleted interface b2b6a990-a634-4e68-ba4c-886b856209a5; detaching it from the instance and deleting it from the info cache [ 1692.020340] env[62816]: DEBUG nova.network.neutron [req-86b6379f-b8fd-4433-96f5-0255e8c7705d req-16b98401-b1fb-4320-8254-3917d61ad95a service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.047990] env[62816]: DEBUG nova.network.neutron [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1692.206901] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e810452a-9c92-49c5-8004-eec19810d3db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.212619] env[62816]: DEBUG nova.network.neutron [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [{"id": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "address": "fa:16:3e:de:ed:af", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c7e341-ff", "ovs_interfaceid": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.223793] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fabf7d7-cfbd-4154-95fd-0c7669588108 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.231076] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788772, 'name': ReconfigVM_Task, 'duration_secs': 0.831424} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.231795] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Reconfigured VM instance instance-00000043 to attach disk [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1692.232702] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1287fe41-db5b-4382-9fa3-953440459727 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.262434] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb710113-dcab-456f-b9fa-3bb9d8ffce55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.281926] env[62816]: DEBUG nova.compute.manager [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1692.288052] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe723a86-d250-48fe-a364-a78bc8415bfb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.291043] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79ad6548-906d-48be-8a18-e3c29cfea863 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.306478] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4db5ccf-d887-493e-9f84-060d237b300c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.312721] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1692.312721] env[62816]: value = "task-1788775" [ 1692.312721] env[62816]: _type = "Task" [ 1692.312721] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.324318] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788775, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.332536] env[62816]: DEBUG nova.compute.provider_tree [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.407014] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ed33e4-b91b-9656-73e9-a5dc9871f341, 'name': SearchDatastore_Task, 'duration_secs': 0.010557} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.407014] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.407014] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1692.407014] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.407014] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.407014] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1692.407014] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc53df9e-12c8-4823-a850-0ab435572645 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.418953] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1692.419184] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1692.423027] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7a7252e-70c2-4ccc-b2a6-8258e1a500ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.426971] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1692.426971] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e0c566-1a23-fc5a-226d-3eb84f5863d2" [ 1692.426971] env[62816]: _type = "Task" [ 1692.426971] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.437227] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e0c566-1a23-fc5a-226d-3eb84f5863d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.496706] env[62816]: DEBUG nova.network.neutron [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.524026] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bdb3dda-1bda-46c4-a1f3-442f461eee06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.537593] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f01001-4349-435a-a047-920146ba8ff7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.577388] env[62816]: DEBUG nova.compute.manager [req-86b6379f-b8fd-4433-96f5-0255e8c7705d req-16b98401-b1fb-4320-8254-3917d61ad95a service nova] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Detach interface failed, port_id=b2b6a990-a634-4e68-ba4c-886b856209a5, reason: Instance 0e0261fe-4376-487c-9d54-c4f37577409c could not be found. 
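_create_folder_if_missing above issues a MakeDirectory call and treats an "already exists" outcome as success, so concurrent spawns can all ensure the devstack-image-cache_base folder. A generic sketch of that idempotent-create idea, using the local filesystem as a stand-in for the datastore call:

    # Idempotent "create folder if missing", in the spirit of the MakeDirectory /
    # _create_folder_if_missing entries above.
    import os

    def create_folder_if_missing(path):
        os.makedirs(path, exist_ok=True)   # succeeds whether or not it already exists
        return path

    create_folder_if_missing("/tmp/devstack-image-cache_base")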
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1692.715520] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.715845] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Instance network_info: |[{"id": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "address": "fa:16:3e:de:ed:af", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c7e341-ff", "ovs_interfaceid": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1692.716726] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:ed:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1c7e341-ffdc-440b-8b2a-6dff7559b1bd', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1692.724850] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating folder: Project (72d49b085afa4df99700ea4e15e9c87e). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.725303] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c08775bd-c6a5-495c-a354-70f8d5a0934e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.741080] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created folder: Project (72d49b085afa4df99700ea4e15e9c87e) in parent group-v370905. [ 1692.741080] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating folder: Instances. Parent ref: group-v371118. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1692.741080] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d735ef5-7958-48b8-99ef-c2647ab8eee3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.755472] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created folder: Instances in parent group-v371118. [ 1692.755472] env[62816]: DEBUG oslo.service.loopingcall [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1692.755472] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1692.755472] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd0536d6-46f3-4636-a897-f934c1779533 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.777096] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1692.777096] env[62816]: value = "task-1788778" [ 1692.777096] env[62816]: _type = "Task" [ 1692.777096] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.786224] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788778, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.816892] env[62816]: INFO nova.compute.manager [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] instance snapshotting [ 1692.823420] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6e14f1-1448-4aaf-bc3f-cd659373c987 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.838471] env[62816]: DEBUG nova.scheduler.client.report [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1692.843060] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788775, 'name': ReconfigVM_Task, 'duration_secs': 0.224346} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.857704] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1692.858857] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50ca350b-8a19-43e9-b3df-3ed7071c332d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.861374] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecc9a7c-5eef-42b2-b5af-abd914d916ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.874542] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1692.874542] env[62816]: value = "task-1788779" [ 1692.874542] env[62816]: _type = "Task" [ 1692.874542] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.888261] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788779, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.939418] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e0c566-1a23-fc5a-226d-3eb84f5863d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011877} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.940563] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-238bb596-982b-48fe-a879-516646d5516f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.948190] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1692.948190] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520a7b14-bf74-2017-5ec4-0d91536f884e" [ 1692.948190] env[62816]: _type = "Task" [ 1692.948190] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.960015] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520a7b14-bf74-2017-5ec4-0d91536f884e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.999017] env[62816]: INFO nova.compute.manager [-] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Took 1.23 seconds to deallocate network for instance. [ 1693.288419] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788778, 'name': CreateVM_Task, 'duration_secs': 0.374386} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.288660] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1693.289261] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.289430] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.289763] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1693.290043] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-923ee295-b35d-41c0-bc52-6f5f68dd6f54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.295718] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1693.295718] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527c93b0-0165-d86f-ea9b-040e0822a4d2" [ 1693.295718] env[62816]: _type = "Task" [ 1693.295718] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.305780] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527c93b0-0165-d86f-ea9b-040e0822a4d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.344447] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.346777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.081s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.348409] env[62816]: INFO nova.compute.claims [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1693.373895] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1693.374241] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-dbd9eb14-cd21-4827-96e3-60f24f98c006 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.378722] env[62816]: INFO nova.scheduler.client.report [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Deleted allocations for instance a5f50ca4-4648-4f33-a6d3-18cfc4fd3441 [ 1693.390554] env[62816]: DEBUG oslo_vmware.api [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788779, 'name': PowerOnVM_Task, 'duration_secs': 0.447518} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.392058] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1693.393845] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1693.393845] env[62816]: value = "task-1788780" [ 1693.393845] env[62816]: _type = "Task" [ 1693.393845] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.395176] env[62816]: DEBUG nova.compute.manager [None req-c7d78136-256c-4fb0-bd8c-f33d0941b316 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1693.396353] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1d4657-2a98-4250-825d-1d4ca334c662 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.407882] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788780, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.460491] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520a7b14-bf74-2017-5ec4-0d91536f884e, 'name': SearchDatastore_Task, 'duration_secs': 0.011317} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.460774] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.461590] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72/f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1693.461590] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cde6a4b3-d5d2-4e5d-9a56-adcaefaa40d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.471247] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1693.471247] env[62816]: value = "task-1788781" [ 1693.471247] env[62816]: _type = "Task" [ 1693.471247] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.481685] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788781, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.507009] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.808728] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527c93b0-0165-d86f-ea9b-040e0822a4d2, 'name': SearchDatastore_Task, 'duration_secs': 0.012687} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.809534] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.809947] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1693.811301] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.811663] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.813122] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1693.813452] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-605e26d1-5caa-476c-bea9-435a13afe73d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.847024] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created directory with path [datastore1] 
devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1693.847024] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1693.847024] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-328c848e-0672-4b47-a139-03e6a2319f46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.855019] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1693.855019] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a6c256-ff0d-314b-5cce-3e90efb4551b" [ 1693.855019] env[62816]: _type = "Task" [ 1693.855019] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.867664] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a6c256-ff0d-314b-5cce-3e90efb4551b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.890030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7fb5a8db-ba40-45e1-b1ba-16c0076509d2 tempest-ServerGroupTestJSON-864785891 tempest-ServerGroupTestJSON-864785891-project-member] Lock "a5f50ca4-4648-4f33-a6d3-18cfc4fd3441" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.175s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.908575] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788780, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.981669] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496927} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.981822] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72/f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1693.982036] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1693.982306] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ae7e64e-33fa-465b-8de3-f6ecde4abf03 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.989981] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1693.989981] env[62816]: value = "task-1788782" [ 1693.989981] env[62816]: _type = "Task" [ 1693.989981] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.000827] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788782, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.366811] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a6c256-ff0d-314b-5cce-3e90efb4551b, 'name': SearchDatastore_Task, 'duration_secs': 0.055104} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.367826] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04f36c60-6e18-47db-97d7-86f610cb29b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.373868] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1694.373868] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52548033-8f3b-6a9a-1fe0-fb1496818a8d" [ 1694.373868] env[62816]: _type = "Task" [ 1694.373868] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.384558] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52548033-8f3b-6a9a-1fe0-fb1496818a8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.406010] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788780, 'name': CreateSnapshot_Task, 'duration_secs': 0.862114} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.406291] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1694.407060] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5f7df9-4a49-40d7-8435-be4343f94e63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.500631] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788782, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064789} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.503254] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1694.504138] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006da296-1337-4fe1-be3e-3caff08cbbd8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.518965] env[62816]: INFO nova.compute.manager [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Unrescuing [ 1694.519360] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1694.519518] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1694.519714] env[62816]: DEBUG nova.network.neutron [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1694.530590] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72/f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1694.536743] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c66171c8-7ab2-423a-98a0-aea6f4388c1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.567209] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1694.567209] env[62816]: value = "task-1788784" [ 1694.567209] env[62816]: _type = "Task" [ 1694.567209] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.578901] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788784, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.675415] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1ed07e-0a3b-4487-8c69-66e2ee4bb15a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.684107] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01dbe2cc-e1df-4176-af6d-207ad369d03c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.716720] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d3dab01-69e8-4e55-b48f-41606da7c0fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.725505] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12a69b4-d777-4db8-8a4c-2bd7a9c73c01 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.739387] env[62816]: DEBUG nova.compute.provider_tree [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1694.885992] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52548033-8f3b-6a9a-1fe0-fb1496818a8d, 'name': SearchDatastore_Task, 'duration_secs': 0.010024} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.886311] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1694.886577] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4ab07a21-2685-42bc-af13-b95473993d6f/4ab07a21-2685-42bc-af13-b95473993d6f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1694.886850] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06929dd7-11e5-478a-8a33-8bf7eb8edf4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.897028] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1694.897028] env[62816]: value = "task-1788785" [ 1694.897028] env[62816]: _type = "Task" [ 1694.897028] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.903374] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788785, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.927456] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1694.927783] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8205ea82-21d1-4a02-9a13-5ed932190871 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.936644] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1694.936644] env[62816]: value = "task-1788786" [ 1694.936644] env[62816]: _type = "Task" [ 1694.936644] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.945456] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788786, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.080286] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788784, 'name': ReconfigVM_Task, 'duration_secs': 0.295178} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.080613] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Reconfigured VM instance instance-00000049 to attach disk [datastore1] f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72/f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1695.081241] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4a778fd7-382f-4a7d-be6b-e1b28118b438 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.089451] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1695.089451] env[62816]: value = "task-1788787" [ 1695.089451] env[62816]: _type = "Task" [ 1695.089451] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.116687] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788787, 'name': Rename_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.243171] env[62816]: DEBUG nova.scheduler.client.report [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1695.352993] env[62816]: DEBUG nova.network.neutron [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updating instance_info_cache with network_info: [{"id": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "address": "fa:16:3e:84:b5:d3", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b5a67df-81", "ovs_interfaceid": "3b5a67df-8153-4be4-8afe-6bd3ae2b807b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.408426] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788785, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510449} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.408426] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4ab07a21-2685-42bc-af13-b95473993d6f/4ab07a21-2685-42bc-af13-b95473993d6f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1695.408426] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1695.408426] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-974533dd-5aff-4083-850c-2298f8b60269 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.418431] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1695.418431] env[62816]: value = "task-1788788" [ 1695.418431] env[62816]: _type = "Task" [ 1695.418431] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.428722] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788788, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.450194] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788786, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.602354] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788787, 'name': Rename_Task, 'duration_secs': 0.441364} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.602814] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.603343] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09e94099-cf37-4bf0-a4a0-8714aa5ea1aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.612189] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1695.612189] env[62816]: value = "task-1788789" [ 1695.612189] env[62816]: _type = "Task" [ 1695.612189] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.624184] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788789, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.749240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.402s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.749728] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1695.752452] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.464s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.753667] env[62816]: INFO nova.compute.claims [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.855799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-c66fa160-d4dd-429f-8751-f36cb2020ff1" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.857415] env[62816]: DEBUG nova.objects.instance [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'flavor' on Instance uuid c66fa160-d4dd-429f-8751-f36cb2020ff1 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1695.930012] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788788, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07766} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.930402] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1695.931323] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9251d08-d61b-4303-93ff-843e06666093 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.958856] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 4ab07a21-2685-42bc-af13-b95473993d6f/4ab07a21-2685-42bc-af13-b95473993d6f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1695.962132] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdd8e793-d505-4b29-9f45-0deebc9c5448 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.982865] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788786, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.984663] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1695.984663] env[62816]: value = "task-1788790" [ 1695.984663] env[62816]: _type = "Task" [ 1695.984663] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.994524] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788790, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.123136] env[62816]: DEBUG oslo_vmware.api [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788789, 'name': PowerOnVM_Task, 'duration_secs': 0.478407} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.123456] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.123632] env[62816]: INFO nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1696.123814] env[62816]: DEBUG nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1696.124648] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed32d988-53f6-4930-b8a9-58ce63e8eca0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.257477] env[62816]: DEBUG nova.compute.utils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1696.260721] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1696.260922] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1696.306393] env[62816]: DEBUG nova.policy [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5a5304d5b9c42a18b08663b3249667c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4b7a9cb88e246bc864edfe211953dea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1696.363020] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3898781-3dc2-4871-8a35-86d6732d5d23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.384889] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.385288] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0248d91-55fe-45ab-82f8-4171ef80bbc4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.395914] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1696.395914] env[62816]: value = "task-1788792" [ 1696.395914] env[62816]: _type = "Task" [ 1696.395914] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.405281] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788792, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.461777] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788786, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.496995] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788790, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.588937] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Successfully created port: a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1696.642894] env[62816]: INFO nova.compute.manager [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Took 21.39 seconds to build instance. [ 1696.763674] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1696.907264] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788792, 'name': PowerOffVM_Task, 'duration_secs': 0.221687} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.907606] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.913101] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1696.915577] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-708ce5a7-0388-4ce5-8dce-80982baab9d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.936677] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1696.936677] env[62816]: value = "task-1788793" [ 1696.936677] env[62816]: _type = "Task" [ 1696.936677] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.947735] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788793, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.961163] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788786, 'name': CloneVM_Task, 'duration_secs': 1.610049} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.961572] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Created linked-clone VM from snapshot [ 1696.962371] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519a8880-5129-4b4a-9554-288904022803 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.974887] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Uploading image 545b7d14-7ff2-4d9b-bd3d-749658ab05db {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1696.990487] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1696.990752] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e2cc4cd0-ede1-4c91-9931-ad3a6dcbdec8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.000200] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788790, 'name': ReconfigVM_Task, 'duration_secs': 0.554132} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.001546] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 4ab07a21-2685-42bc-af13-b95473993d6f/4ab07a21-2685-42bc-af13-b95473993d6f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1697.002422] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1697.002422] env[62816]: value = "task-1788794" [ 1697.002422] env[62816]: _type = "Task" [ 1697.002422] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.002641] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-481e478b-8183-4163-bf10-67a4e7e59e0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.017779] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788794, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.019649] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1697.019649] env[62816]: value = "task-1788795" [ 1697.019649] env[62816]: _type = "Task" [ 1697.019649] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.054169] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011b8395-4ddd-4bf7-89f0-b654faebaeb5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.063251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ae379d-72b4-44d4-bc14-320e172d8441 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.095705] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d84b2a-0667-4cad-9530-3fb152b66d64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.104133] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5b404e-63a0-4d34-91fa-26c463c3dcd4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.118349] env[62816]: DEBUG nova.compute.provider_tree [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1697.144907] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2371a861-eb58-44cf-9098-aa4b938b64e9 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.901s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.447112] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788793, 'name': ReconfigVM_Task, 'duration_secs': 0.448009} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.447425] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1697.447634] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1697.447866] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0c8c9c5-72fe-4814-9a7c-d50ec6f2c413 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.455166] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1697.455166] env[62816]: value = "task-1788796" [ 1697.455166] env[62816]: _type = "Task" [ 1697.455166] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.463218] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788796, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.514087] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788794, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.528911] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788795, 'name': Rename_Task, 'duration_secs': 0.298768} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.529215] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1697.529455] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a3569b37-68fe-4a21-8412-97d17e1586f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.535979] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1697.535979] env[62816]: value = "task-1788797" [ 1697.535979] env[62816]: _type = "Task" [ 1697.535979] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.545233] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.623267] env[62816]: DEBUG nova.scheduler.client.report [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1697.776639] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1697.815034] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1697.815034] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1697.815385] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1697.815780] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1697.817574] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1697.818875] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7850f62d-8cf9-4ef1-8cdb-24e2790574a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.832471] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a38d8381-bc09-4a3a-b8bc-d2a314bef3e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.966814] env[62816]: DEBUG oslo_vmware.api [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788796, 'name': PowerOnVM_Task, 'duration_secs': 0.391742} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.967134] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1697.967445] env[62816]: DEBUG nova.compute.manager [None req-7085f218-da3c-4aa0-9da6-4558414f69a8 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1697.968246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ac671f-01af-4c24-812c-8a69310b80ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.015380] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788794, 'name': Destroy_Task, 'duration_secs': 0.873732} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.016421] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Destroyed the VM [ 1698.016668] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1698.016918] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a1ac06a6-2003-4694-b813-b4c7ae534b24 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.025789] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1698.025789] env[62816]: value = "task-1788798" [ 1698.025789] env[62816]: _type = "Task" [ 1698.025789] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.035460] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788798, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.046378] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788797, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.077413] env[62816]: DEBUG nova.compute.manager [req-fa0701f0-4c47-41b2-b87f-3258b60da347 req-b6a0fb92-8729-4877-b69a-b417ad0a47b3 service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Received event network-vif-plugged-a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1698.077660] env[62816]: DEBUG oslo_concurrency.lockutils [req-fa0701f0-4c47-41b2-b87f-3258b60da347 req-b6a0fb92-8729-4877-b69a-b417ad0a47b3 service nova] Acquiring lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.077944] env[62816]: DEBUG oslo_concurrency.lockutils [req-fa0701f0-4c47-41b2-b87f-3258b60da347 req-b6a0fb92-8729-4877-b69a-b417ad0a47b3 service nova] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.078051] env[62816]: DEBUG oslo_concurrency.lockutils [req-fa0701f0-4c47-41b2-b87f-3258b60da347 req-b6a0fb92-8729-4877-b69a-b417ad0a47b3 service nova] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.078284] env[62816]: DEBUG nova.compute.manager [req-fa0701f0-4c47-41b2-b87f-3258b60da347 req-b6a0fb92-8729-4877-b69a-b417ad0a47b3 service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] No waiting events found dispatching network-vif-plugged-a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1698.078406] env[62816]: WARNING nova.compute.manager [req-fa0701f0-4c47-41b2-b87f-3258b60da347 req-b6a0fb92-8729-4877-b69a-b417ad0a47b3 service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Received unexpected event network-vif-plugged-a22f6d4b-1226-4202-95b7-f25e7108759f for instance with vm_state building and task_state spawning. [ 1698.128606] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.129136] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1698.131742] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.808s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.133521] env[62816]: INFO nova.compute.claims [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1698.163564] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Successfully updated port: a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1698.328079] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.328079] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.328079] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.328382] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.328533] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1698.330644] env[62816]: INFO nova.compute.manager [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Terminating instance [ 1698.332425] env[62816]: DEBUG nova.compute.manager [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1698.332618] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1698.333468] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f353bea7-786a-450b-b550-01682a8b3166 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.342259] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1698.342496] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f510b2e7-7937-4b8f-bf23-d9a41f61890e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.350015] env[62816]: DEBUG oslo_vmware.api [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1698.350015] env[62816]: value = "task-1788800" [ 1698.350015] env[62816]: _type = "Task" [ 1698.350015] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.359302] env[62816]: DEBUG oslo_vmware.api [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788800, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.537287] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788798, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.545956] env[62816]: DEBUG oslo_vmware.api [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788797, 'name': PowerOnVM_Task, 'duration_secs': 0.691805} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.546191] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1698.546370] env[62816]: INFO nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Took 9.00 seconds to spawn the instance on the hypervisor. [ 1698.546550] env[62816]: DEBUG nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1698.547341] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8558a553-e5cc-4c2e-b23f-ed5bff374a0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.637619] env[62816]: DEBUG nova.compute.utils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1698.640973] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1698.641164] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1698.664269] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "refresh_cache-679ce8d3-a57c-4620-81bc-ee8deea4bc8e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1698.664416] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "refresh_cache-679ce8d3-a57c-4620-81bc-ee8deea4bc8e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1698.664565] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1698.696506] env[62816]: DEBUG nova.policy [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5a5304d5b9c42a18b08663b3249667c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4b7a9cb88e246bc864edfe211953dea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1698.862623] env[62816]: DEBUG oslo_vmware.api [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788800, 'name': PowerOffVM_Task, 'duration_secs': 0.286199} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.863322] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1698.863461] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1698.863815] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cdce6e83-9afc-434c-98a1-385ae7dfde2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.961774] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1698.961774] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1698.961774] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Deleting the datastore file [datastore1] f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1698.962406] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d31dd98-79cd-4139-9ef1-e3432505f6ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.969014] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Successfully created port: 07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1698.973526] env[62816]: DEBUG oslo_vmware.api [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for the task: (returnval){ [ 1698.973526] env[62816]: value = "task-1788802" [ 1698.973526] env[62816]: _type = "Task" [ 1698.973526] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.982435] env[62816]: DEBUG oslo_vmware.api [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788802, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.040287] env[62816]: DEBUG oslo_vmware.api [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788798, 'name': RemoveSnapshot_Task, 'duration_secs': 0.907939} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.040287] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1699.068240] env[62816]: INFO nova.compute.manager [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Took 20.90 seconds to build instance. [ 1699.141936] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1699.219938] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1699.389567] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Updating instance_info_cache with network_info: [{"id": "a22f6d4b-1226-4202-95b7-f25e7108759f", "address": "fa:16:3e:9c:63:ae", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22f6d4b-12", "ovs_interfaceid": "a22f6d4b-1226-4202-95b7-f25e7108759f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.416579] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe4ec55-09ef-4bde-953a-075ac8b62f14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.423470] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de4f276-4559-4d5c-8983-207f50e94049 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.454503] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134d487c-db27-42b9-b79c-9e0ea44eb30a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.462302] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42c60059-76d5-4c49-bc67-222c7b342004 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.477871] env[62816]: DEBUG nova.compute.provider_tree [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.488257] env[62816]: DEBUG oslo_vmware.api [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Task: {'id': task-1788802, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318044} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.489049] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1699.489243] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1699.489420] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1699.489589] env[62816]: INFO nova.compute.manager [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1699.489819] env[62816]: DEBUG oslo.service.loopingcall [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1699.489999] env[62816]: DEBUG nova.compute.manager [-] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1699.490102] env[62816]: DEBUG nova.network.neutron [-] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1700.269895] env[62816]: WARNING nova.compute.manager [None req-27a7755a-07fc-43b1-8314-66acd25f9824 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Image not found during snapshot: nova.exception.ImageNotFound: Image 545b7d14-7ff2-4d9b-bd3d-749658ab05db could not be found. 
[ 1700.274389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b8871f5f-6216-4a3a-b10a-052d1a7cbe35 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "4ab07a21-2685-42bc-af13-b95473993d6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.116s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.277546] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "refresh_cache-679ce8d3-a57c-4620-81bc-ee8deea4bc8e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.280544] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Instance network_info: |[{"id": "a22f6d4b-1226-4202-95b7-f25e7108759f", "address": "fa:16:3e:9c:63:ae", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22f6d4b-12", "ovs_interfaceid": "a22f6d4b-1226-4202-95b7-f25e7108759f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1700.280544] env[62816]: DEBUG nova.scheduler.client.report [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.286472] env[62816]: DEBUG nova.compute.manager [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Received event network-changed-a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 
1700.286472] env[62816]: DEBUG nova.compute.manager [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Refreshing instance network info cache due to event network-changed-a22f6d4b-1226-4202-95b7-f25e7108759f. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1700.286472] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] Acquiring lock "refresh_cache-679ce8d3-a57c-4620-81bc-ee8deea4bc8e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.286472] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] Acquired lock "refresh_cache-679ce8d3-a57c-4620-81bc-ee8deea4bc8e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.286472] env[62816]: DEBUG nova.network.neutron [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Refreshing network info cache for port a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1700.289336] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:63:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9bc2632-36f9-4912-8782-8bbb789f909d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a22f6d4b-1226-4202-95b7-f25e7108759f', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1700.300031] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Creating folder: Project (e4b7a9cb88e246bc864edfe211953dea). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1700.300031] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-359dc3a0-fb9b-43f6-8ee9-999f5605424c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.315263] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Created folder: Project (e4b7a9cb88e246bc864edfe211953dea) in parent group-v370905. [ 1700.315421] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Creating folder: Instances. Parent ref: group-v371123. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1700.315620] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-99d37c92-20f2-4b2f-8b49-772121a4553d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.327980] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Created folder: Instances in parent group-v371123. [ 1700.328281] env[62816]: DEBUG oslo.service.loopingcall [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.328577] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1700.328848] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-440fa4da-e756-46a9-bfe9-42c79d197f68 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.352322] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1700.352322] env[62816]: value = "task-1788805" [ 1700.352322] env[62816]: _type = "Task" [ 1700.352322] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.360411] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788805, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.580789] env[62816]: DEBUG nova.compute.manager [req-cd78a0fd-ae61-4bb8-bbb7-4f8587bb512b req-ead53a76-44c1-4cfc-bfb3-7998a2fb6713 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Received event network-vif-deleted-9298a5b6-bb53-483a-aedc-c756f3d8484d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1700.580975] env[62816]: INFO nova.compute.manager [req-cd78a0fd-ae61-4bb8-bbb7-4f8587bb512b req-ead53a76-44c1-4cfc-bfb3-7998a2fb6713 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Neutron deleted interface 9298a5b6-bb53-483a-aedc-c756f3d8484d; detaching it from the instance and deleting it from the info cache [ 1700.581151] env[62816]: DEBUG nova.network.neutron [req-cd78a0fd-ae61-4bb8-bbb7-4f8587bb512b req-ead53a76-44c1-4cfc-bfb3-7998a2fb6713 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.790162] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1700.793203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.661s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.793824] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1700.798633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.090s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.799154] env[62816]: DEBUG nova.objects.instance [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lazy-loading 'resources' on Instance uuid 8ccce660-6c41-412d-99ac-65ca7915d728 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.811387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.811505] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.811959] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1700.812080] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
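The CreateVM_Task entries above show the driver's task-waiting behaviour: oslo.vmware submits a vSphere task (here task-1788805) and then polls its progress until it reports success or failure, producing the "progress is N%" lines. A simplified, self-contained sketch of such a polling loop, using hypothetical helper names rather than the oslo.vmware implementation, is:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info is assumed to return an object with .state and
        # .progress, mirroring the task info polled in the log above.
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(getattr(info, "error", "task failed"))
            time.sleep(poll_interval)
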
1700.812315] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.818834] env[62816]: INFO nova.compute.manager [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Terminating instance [ 1700.822077] env[62816]: DEBUG nova.compute.manager [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1700.822332] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.823803] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df575ed-8b2c-400a-a7e4-273e2d990fd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.835420] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.838279] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1700.838564] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1700.838728] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 
tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1700.838967] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1700.839154] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1700.839309] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1700.839580] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1700.839765] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1700.839935] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1700.840229] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1700.840367] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1700.840587] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9eace3ee-f71f-4f09-906b-ba1794500e8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.843296] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9445dc64-5a8b-45fe-aa6f-c7fd1bbf3264 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.861775] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446f5769-83c7-4095-bd07-b2140a25dfc9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.868367] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1700.868367] env[62816]: value = "task-1788806" [ 1700.868367] env[62816]: _type = "Task" [ 1700.868367] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.886081] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788805, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.892199] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788806, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.044772] env[62816]: DEBUG nova.network.neutron [-] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.085031] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ebe4c11a-1249-4c85-91a4-2c1f800cda0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.097216] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2244467-b45e-4739-8dd0-041d743f67b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.144264] env[62816]: DEBUG nova.compute.manager [req-cd78a0fd-ae61-4bb8-bbb7-4f8587bb512b req-ead53a76-44c1-4cfc-bfb3-7998a2fb6713 service nova] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Detach interface failed, port_id=9298a5b6-bb53-483a-aedc-c756f3d8484d, reason: Instance f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1701.152044] env[62816]: DEBUG nova.network.neutron [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Updated VIF entry in instance network info cache for port a22f6d4b-1226-4202-95b7-f25e7108759f. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1701.152133] env[62816]: DEBUG nova.network.neutron [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Updating instance_info_cache with network_info: [{"id": "a22f6d4b-1226-4202-95b7-f25e7108759f", "address": "fa:16:3e:9c:63:ae", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22f6d4b-12", "ovs_interfaceid": "a22f6d4b-1226-4202-95b7-f25e7108759f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.247820] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Successfully updated port: 07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1701.304031] env[62816]: DEBUG nova.compute.utils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1701.306541] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1701.307203] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1701.367577] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788805, 'name': CreateVM_Task, 'duration_secs': 0.612439} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.369993] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1701.371476] env[62816]: DEBUG nova.policy [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5a5304d5b9c42a18b08663b3249667c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4b7a9cb88e246bc864edfe211953dea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1701.373321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.373533] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.373777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1701.376910] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a781aa5-d5a1-40bd-a85a-989bcbbd81fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.386502] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788806, 'name': PowerOffVM_Task, 'duration_secs': 0.19344} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.386797] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1701.386797] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cbc431-37a2-7b54-89e9-9a314c9ba706" [ 1701.386797] env[62816]: _type = "Task" [ 1701.386797] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.389339] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1701.389521] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1701.390759] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d639a3e7-1f41-489e-b392-bdbec6456b37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.403608] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cbc431-37a2-7b54-89e9-9a314c9ba706, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.484320] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1701.484619] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1701.484853] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleting the datastore file [datastore1] 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1701.485569] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e035c25-444c-4a5e-b4db-9964507aacd0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.494641] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for the task: (returnval){ [ 1701.494641] env[62816]: value = "task-1788808" [ 1701.494641] env[62816]: _type = "Task" [ 1701.494641] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.503370] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788808, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.551141] env[62816]: INFO nova.compute.manager [-] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Took 2.06 seconds to deallocate network for instance. [ 1701.594445] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1e4899-30c4-45d1-a1e2-f2a0f72f71cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.603776] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337e6a9f-6f05-4d2e-87bb-337614be1ebf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.636660] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b564c4e-05e5-4052-8e78-1c3a0abd72eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.647039] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f80445e-bdfe-4384-9047-0f873648e4bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.661414] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] Releasing lock "refresh_cache-679ce8d3-a57c-4620-81bc-ee8deea4bc8e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.661690] env[62816]: DEBUG nova.compute.manager [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Received event network-changed-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1701.661860] env[62816]: DEBUG nova.compute.manager [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Refreshing instance network info cache due to event network-changed-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1701.662079] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] Acquiring lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.662222] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] Acquired lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.662427] env[62816]: DEBUG nova.network.neutron [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Refreshing network info cache for port c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1701.664254] env[62816]: DEBUG nova.compute.provider_tree [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.717300] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Successfully created port: 09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1701.750490] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "refresh_cache-9ab4e631-5b31-4b37-9b49-4f0423286752" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.750651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "refresh_cache-9ab4e631-5b31-4b37-9b49-4f0423286752" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.750804] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1701.807267] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1701.901156] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cbc431-37a2-7b54-89e9-9a314c9ba706, 'name': SearchDatastore_Task, 'duration_secs': 0.038656} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.901457] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.901690] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1701.901917] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.902072] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.902255] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1701.902515] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57c7968a-6e27-4554-9ff6-f2a488b94092 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.918454] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1701.918674] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1701.919409] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5bdda1b-4fc8-4afe-b675-0f3a5835ca65 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.925283] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1701.925283] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52071cdc-5bcd-b5b3-e3ff-a5946c987bf7" [ 1701.925283] env[62816]: _type = "Task" [ 1701.925283] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.935041] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52071cdc-5bcd-b5b3-e3ff-a5946c987bf7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.005041] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788808, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.064741] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.138473] env[62816]: DEBUG nova.compute.manager [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Received event network-vif-plugged-07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1702.138741] env[62816]: DEBUG oslo_concurrency.lockutils [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] Acquiring lock "9ab4e631-5b31-4b37-9b49-4f0423286752-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.138909] env[62816]: DEBUG oslo_concurrency.lockutils [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.139093] env[62816]: DEBUG oslo_concurrency.lockutils [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.139261] env[62816]: DEBUG nova.compute.manager [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] No waiting events found dispatching network-vif-plugged-07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1702.139421] env[62816]: WARNING nova.compute.manager [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Received unexpected event network-vif-plugged-07bf7c71-9e13-40e5-b267-965611379c35 for instance with vm_state building and task_state spawning. [ 1702.139576] env[62816]: DEBUG nova.compute.manager [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Received event network-changed-07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1702.139728] env[62816]: DEBUG nova.compute.manager [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Refreshing instance network info cache due to event network-changed-07bf7c71-9e13-40e5-b267-965611379c35. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1702.139888] env[62816]: DEBUG oslo_concurrency.lockutils [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] Acquiring lock "refresh_cache-9ab4e631-5b31-4b37-9b49-4f0423286752" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.147253] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.147487] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.167494] env[62816]: DEBUG nova.scheduler.client.report [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1702.284885] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1702.293355] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "9972b167-a950-4dba-ac02-068f66300053" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.293588] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "9972b167-a950-4dba-ac02-068f66300053" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.403654] env[62816]: DEBUG nova.network.neutron [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updated VIF entry in instance network info cache for port c1c7e341-ffdc-440b-8b2a-6dff7559b1bd. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1702.404047] env[62816]: DEBUG nova.network.neutron [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [{"id": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "address": "fa:16:3e:de:ed:af", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c7e341-ff", "ovs_interfaceid": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.415521] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Updating instance_info_cache with network_info: [{"id": "07bf7c71-9e13-40e5-b267-965611379c35", "address": "fa:16:3e:8a:3a:28", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07bf7c71-9e", "ovs_interfaceid": "07bf7c71-9e13-40e5-b267-965611379c35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.436924] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': 
session[52166549-a417-fee9-199e-38636bfc0ddd]52071cdc-5bcd-b5b3-e3ff-a5946c987bf7, 'name': SearchDatastore_Task, 'duration_secs': 0.030359} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.437717] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74703a54-1c32-42eb-8d3d-33d35ebfaf6d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.442936] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1702.442936] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237dcc5-7df5-4b53-3170-10d224f4ddad" [ 1702.442936] env[62816]: _type = "Task" [ 1702.442936] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.450963] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5237dcc5-7df5-4b53-3170-10d224f4ddad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.504677] env[62816]: DEBUG oslo_vmware.api [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Task: {'id': task-1788808, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.609714} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.504929] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1702.505124] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1702.505294] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1702.505476] env[62816]: INFO nova.compute.manager [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1702.505736] env[62816]: DEBUG oslo.service.loopingcall [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.505928] env[62816]: DEBUG nova.compute.manager [-] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1702.506035] env[62816]: DEBUG nova.network.neutron [-] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1702.650218] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1702.672598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.874s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.675029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.168s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.675241] env[62816]: DEBUG nova.objects.instance [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lazy-loading 'resources' on Instance uuid 0e0261fe-4376-487c-9d54-c4f37577409c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1702.700778] env[62816]: INFO nova.scheduler.client.report [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Deleted allocations for instance 8ccce660-6c41-412d-99ac-65ca7915d728 [ 1702.795879] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1702.816832] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1702.844129] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1702.844383] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1702.844538] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1702.844734] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1702.844864] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1702.845015] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1702.845232] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1702.845422] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1702.845624] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1702.845813] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1702.845963] env[62816]: DEBUG nova.virt.hardware [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1702.847094] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2512b79-4e74-4c5f-a36e-ce87799d563d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.855804] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bfab25-f613-4931-839b-8836ad8f6dd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.906611] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b30d15f-a447-424d-b44a-266a7710d066 req-fc7d1fd6-885a-47d8-b566-cbd98b5de99b service nova] Releasing lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.918378] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "refresh_cache-9ab4e631-5b31-4b37-9b49-4f0423286752" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.918695] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Instance network_info: |[{"id": "07bf7c71-9e13-40e5-b267-965611379c35", "address": "fa:16:3e:8a:3a:28", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap07bf7c71-9e", "ovs_interfaceid": "07bf7c71-9e13-40e5-b267-965611379c35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1702.918988] env[62816]: DEBUG oslo_concurrency.lockutils [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] Acquired lock "refresh_cache-9ab4e631-5b31-4b37-9b49-4f0423286752" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.919211] env[62816]: DEBUG nova.network.neutron [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Refreshing network info cache for port 07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1702.920355] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:3a:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9bc2632-36f9-4912-8782-8bbb789f909d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07bf7c71-9e13-40e5-b267-965611379c35', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1702.927643] env[62816]: DEBUG oslo.service.loopingcall [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.930477] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1702.930933] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e895ed53-6d16-4f63-8bd7-4f36819d54c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.955699] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5237dcc5-7df5-4b53-3170-10d224f4ddad, 'name': SearchDatastore_Task, 'duration_secs': 0.014124} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.957034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.958022] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 679ce8d3-a57c-4620-81bc-ee8deea4bc8e/679ce8d3-a57c-4620-81bc-ee8deea4bc8e.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1702.958022] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1702.958022] env[62816]: value = "task-1788809" [ 1702.958022] env[62816]: _type = "Task" [ 1702.958022] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.958022] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42de7d0e-6092-4adc-80d0-a2c05fc18375 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.967989] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788809, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.969325] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1702.969325] env[62816]: value = "task-1788810" [ 1702.969325] env[62816]: _type = "Task" [ 1702.969325] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.979251] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788810, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.182389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.210421] env[62816]: DEBUG nova.network.neutron [-] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.213040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-38cb7005-ca1f-4025-a3ea-3df3e62c40e5 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393 tempest-FloatingIPsAssociationNegativeTestJSON-1694416393-project-member] Lock "8ccce660-6c41-412d-99ac-65ca7915d728" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.968s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.273434] env[62816]: DEBUG nova.network.neutron [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Updated VIF entry in instance network info cache for port 07bf7c71-9e13-40e5-b267-965611379c35. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.273911] env[62816]: DEBUG nova.network.neutron [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Updating instance_info_cache with network_info: [{"id": "07bf7c71-9e13-40e5-b267-965611379c35", "address": "fa:16:3e:8a:3a:28", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07bf7c71-9e", "ovs_interfaceid": "07bf7c71-9e13-40e5-b267-965611379c35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.326024] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.403487] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Successfully updated port: 09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1703.471761] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788809, 'name': CreateVM_Task, 'duration_secs': 0.472439} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.475720] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1703.476523] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.476660] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.476991] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1703.477711] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08adf691-7996-4f91-8dda-bbfa736ed0ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.486682] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788810, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.490353] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1703.490353] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5246dc85-3048-be8e-ba89-2c53131abcfb" [ 1703.490353] env[62816]: _type = "Task" [ 1703.490353] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.505230] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5246dc85-3048-be8e-ba89-2c53131abcfb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.522764] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6068471b-9052-4f03-8712-67dc0acaddea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.530625] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d567bc79-9f2c-4731-bd36-ff8cc028fb1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.563996] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93795260-c9e8-4aad-8d06-d09a9e4b57ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.571800] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622058ae-d532-4167-b679-5d8c652e43da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.585350] env[62816]: DEBUG nova.compute.provider_tree [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.712950] env[62816]: INFO nova.compute.manager [-] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Took 1.21 seconds to deallocate network for instance. 
[ 1703.780687] env[62816]: DEBUG oslo_concurrency.lockutils [req-421af111-2d27-4aaf-8743-24272bcdf8e9 req-513aefb1-ba8f-4186-9747-e06d2b3c221b service nova] Releasing lock "refresh_cache-9ab4e631-5b31-4b37-9b49-4f0423286752" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.907345] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "refresh_cache-2583e2ba-8904-420c-a417-d6af71bfa9ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.907345] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "refresh_cache-2583e2ba-8904-420c-a417-d6af71bfa9ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.907345] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.982019] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581274} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.982286] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 679ce8d3-a57c-4620-81bc-ee8deea4bc8e/679ce8d3-a57c-4620-81bc-ee8deea4bc8e.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1703.982491] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1703.982733] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4d129ce1-89ff-495b-8b06-b2d41bcdc8a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.991321] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1703.991321] env[62816]: value = "task-1788811" [ 1703.991321] env[62816]: _type = "Task" [ 1703.991321] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.003476] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5246dc85-3048-be8e-ba89-2c53131abcfb, 'name': SearchDatastore_Task, 'duration_secs': 0.027742} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.006365] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.006625] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1704.006866] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.007107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.007311] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.007629] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788811, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.007835] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1787026-4492-4490-b545-256edd5064c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.016398] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.016592] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1704.017296] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6820fb39-7c47-4fe2-9933-d601e60aabee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.022676] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1704.022676] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cb8a54-f2c9-0133-8a8d-cad7f864c9ea" [ 1704.022676] env[62816]: _type = "Task" [ 1704.022676] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.032653] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb8a54-f2c9-0133-8a8d-cad7f864c9ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.091270] env[62816]: DEBUG nova.scheduler.client.report [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1704.174114] env[62816]: DEBUG nova.compute.manager [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Received event network-vif-deleted-479053d9-125e-49d2-94b6-1c48422ea761 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.174114] env[62816]: DEBUG nova.compute.manager [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Received event network-vif-plugged-09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.175293] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] Acquiring lock "2583e2ba-8904-420c-a417-d6af71bfa9ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.175522] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.175706] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.175828] env[62816]: DEBUG nova.compute.manager [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] No waiting events found dispatching network-vif-plugged-09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1704.176007] env[62816]: WARNING nova.compute.manager [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Received unexpected event network-vif-plugged-09a63dbd-34b4-4340-887c-035cebe037ff for instance with vm_state building and task_state spawning. 
[ 1704.176183] env[62816]: DEBUG nova.compute.manager [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Received event network-changed-09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1704.176339] env[62816]: DEBUG nova.compute.manager [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Refreshing instance network info cache due to event network-changed-09a63dbd-34b4-4340-887c-035cebe037ff. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1704.176504] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] Acquiring lock "refresh_cache-2583e2ba-8904-420c-a417-d6af71bfa9ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.219785] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.438207] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1704.504884] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068518} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.505181] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1704.506068] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223fcc92-1302-4084-81f6-f6e5326c0fca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.528994] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 679ce8d3-a57c-4620-81bc-ee8deea4bc8e/679ce8d3-a57c-4620-81bc-ee8deea4bc8e.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1704.531958] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d578580f-f182-4541-928f-3718e8f9e7c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.555907] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb8a54-f2c9-0133-8a8d-cad7f864c9ea, 'name': SearchDatastore_Task, 'duration_secs': 0.01192} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.557863] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1704.557863] env[62816]: value = "task-1788812" [ 1704.557863] env[62816]: _type = "Task" [ 1704.557863] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.558070] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-322a2beb-3d2c-4ee9-bd63-89122de7716e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.569134] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788812, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.570104] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1704.570104] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52870af6-2e21-f3b0-ace5-a25725691330" [ 1704.570104] env[62816]: _type = "Task" [ 1704.570104] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.579316] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52870af6-2e21-f3b0-ace5-a25725691330, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.596656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.922s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.599297] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.535s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.599354] env[62816]: DEBUG nova.objects.instance [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lazy-loading 'resources' on Instance uuid f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1704.623362] env[62816]: INFO nova.scheduler.client.report [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Deleted allocations for instance 0e0261fe-4376-487c-9d54-c4f37577409c [ 1704.628206] env[62816]: DEBUG nova.network.neutron [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Updating instance_info_cache with network_info: [{"id": "09a63dbd-34b4-4340-887c-035cebe037ff", "address": "fa:16:3e:ed:50:b0", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09a63dbd-34", "ovs_interfaceid": "09a63dbd-34b4-4340-887c-035cebe037ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.070885] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788812, 'name': ReconfigVM_Task, 'duration_secs': 0.311025} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.074070] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 679ce8d3-a57c-4620-81bc-ee8deea4bc8e/679ce8d3-a57c-4620-81bc-ee8deea4bc8e.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1705.074967] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eda41d0d-3a3e-4227-a19d-c1857082084b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.082968] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52870af6-2e21-f3b0-ace5-a25725691330, 'name': SearchDatastore_Task, 'duration_secs': 0.01974} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.084321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.084591] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9ab4e631-5b31-4b37-9b49-4f0423286752/9ab4e631-5b31-4b37-9b49-4f0423286752.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1705.084908] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1705.084908] env[62816]: value = "task-1788813" [ 1705.084908] env[62816]: _type = "Task" [ 1705.084908] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.085133] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53f8bfca-7482-4c62-9b8d-f866787fd080 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.095880] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788813, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.097054] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1705.097054] env[62816]: value = "task-1788814" [ 1705.097054] env[62816]: _type = "Task" [ 1705.097054] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.107181] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788814, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.132600] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "refresh_cache-2583e2ba-8904-420c-a417-d6af71bfa9ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.132913] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Instance network_info: |[{"id": "09a63dbd-34b4-4340-887c-035cebe037ff", "address": "fa:16:3e:ed:50:b0", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09a63dbd-34", "ovs_interfaceid": "09a63dbd-34b4-4340-887c-035cebe037ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1705.133249] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] Acquired lock "refresh_cache-2583e2ba-8904-420c-a417-d6af71bfa9ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.133445] env[62816]: DEBUG nova.network.neutron [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Refreshing network info cache for port 09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1705.138132] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ed:50:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c9bc2632-36f9-4912-8782-8bbb789f909d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09a63dbd-34b4-4340-887c-035cebe037ff', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1705.143387] env[62816]: DEBUG oslo.service.loopingcall [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 
tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1705.143907] env[62816]: DEBUG oslo_concurrency.lockutils [None req-def58b9d-ccb6-4fa5-83c8-32d34784715c tempest-ServersAdminTestJSON-1690259284 tempest-ServersAdminTestJSON-1690259284-project-member] Lock "0e0261fe-4376-487c-9d54-c4f37577409c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.595s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1705.145904] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1705.146159] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42845227-9da8-4d73-b0b8-bec9926ce07a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.173250] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1705.173250] env[62816]: value = "task-1788815" [ 1705.173250] env[62816]: _type = "Task" [ 1705.173250] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.183411] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788815, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.406796] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5daa4cd6-fdb4-408b-9beb-7cf0a05c15c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.416727] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26cc6345-461f-4459-8d8f-87acf069027e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.452579] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffde8be7-1b8f-4f16-9d5f-6d43e91721ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.463015] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35ff6c6e-c820-4d21-820c-3df4bb5b68f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.479139] env[62816]: DEBUG nova.compute.provider_tree [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.601119] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788813, 'name': Rename_Task, 'duration_secs': 0.17196} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.608748] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1705.608748] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83b11821-2e81-4bb1-b310-e7898a1d0cb3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.614223] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788814, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.617051] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1705.617051] env[62816]: value = "task-1788816" [ 1705.617051] env[62816]: _type = "Task" [ 1705.617051] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.626505] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.685422] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788815, 'name': CreateVM_Task, 'duration_secs': 0.399444} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.685591] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1705.686264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.686429] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.686746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1705.687010] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70ff95d2-edd8-4172-ba99-7394551274e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.692145] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1705.692145] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521a17cf-b2c2-3fac-dd25-e0bea66f665d" [ 1705.692145] env[62816]: _type = "Task" [ 1705.692145] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.703955] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521a17cf-b2c2-3fac-dd25-e0bea66f665d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.981762] env[62816]: DEBUG nova.scheduler.client.report [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1705.998275] env[62816]: DEBUG nova.network.neutron [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Updated VIF entry in instance network info cache for port 09a63dbd-34b4-4340-887c-035cebe037ff. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1705.998724] env[62816]: DEBUG nova.network.neutron [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Updating instance_info_cache with network_info: [{"id": "09a63dbd-34b4-4340-887c-035cebe037ff", "address": "fa:16:3e:ed:50:b0", "network": {"id": "4c2cf208-308b-4ba0-aa93-2865b2221036", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1332901771-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4b7a9cb88e246bc864edfe211953dea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c9bc2632-36f9-4912-8782-8bbb789f909d", "external-id": "nsx-vlan-transportzone-897", "segmentation_id": 897, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09a63dbd-34", "ovs_interfaceid": "09a63dbd-34b4-4340-887c-035cebe037ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.113079] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788814, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60651} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.113368] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 9ab4e631-5b31-4b37-9b49-4f0423286752/9ab4e631-5b31-4b37-9b49-4f0423286752.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.113746] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.114043] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a9a92af-3985-4754-b47e-d09066fee317 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.123937] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1706.123937] env[62816]: value = "task-1788817" [ 1706.123937] env[62816]: _type = "Task" [ 1706.123937] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.130108] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788816, 'name': PowerOnVM_Task, 'duration_secs': 0.488192} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.130756] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1706.130995] env[62816]: INFO nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 1706.131229] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1706.132058] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f04688-737e-4b93-ada2-a17444aac2bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.137784] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788817, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.207215] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521a17cf-b2c2-3fac-dd25-e0bea66f665d, 'name': SearchDatastore_Task, 'duration_secs': 0.013075} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.207881] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.208139] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1706.208449] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1706.208597] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1706.208781] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1706.209094] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22b9fa15-1011-40ae-9f40-b426b1cc73f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.220452] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1706.220745] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1706.221608] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a076f2b-7e77-4a2d-a76b-2c6bc9ac7214 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.228878] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1706.228878] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529a81be-4dae-1383-0613-ed00f0ec6a00" [ 1706.228878] env[62816]: _type = "Task" [ 1706.228878] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.238287] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529a81be-4dae-1383-0613-ed00f0ec6a00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.486687] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.888s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.489413] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.307s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.490982] env[62816]: INFO nova.compute.claims [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1706.502518] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb1d53b3-8c4c-46eb-a151-589d319a6223 req-01694109-374d-4139-8868-16b85b44dc0f service nova] Releasing lock "refresh_cache-2583e2ba-8904-420c-a417-d6af71bfa9ac" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.517314] env[62816]: INFO nova.scheduler.client.report [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Deleted allocations for instance f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72 [ 1706.634586] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788817, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.101701} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.634937] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1706.635770] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ddc1a7-f2cf-4bb6-9943-9f1e3c9d58df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.662428] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 9ab4e631-5b31-4b37-9b49-4f0423286752/9ab4e631-5b31-4b37-9b49-4f0423286752.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1706.664347] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63e08ae3-1b60-4d2c-a433-cade5e863d37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.679138] env[62816]: INFO nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Took 17.43 seconds to build instance. [ 1706.687549] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1706.687549] env[62816]: value = "task-1788818" [ 1706.687549] env[62816]: _type = "Task" [ 1706.687549] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.703267] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788818, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.739342] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529a81be-4dae-1383-0613-ed00f0ec6a00, 'name': SearchDatastore_Task, 'duration_secs': 0.012357} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.740213] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6b72d5b-c507-4d73-b16a-cc3e39e0ab79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.746111] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1706.746111] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a1f585-f3e4-0e7c-f2c6-321d2dce278a" [ 1706.746111] env[62816]: _type = "Task" [ 1706.746111] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.753823] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a1f585-f3e4-0e7c-f2c6-321d2dce278a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.026609] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4165460e-bbfc-47db-8199-b05b338c9338 tempest-ServerMetadataTestJSON-794266469 tempest-ServerMetadataTestJSON-794266469-project-member] Lock "f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.698s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.181698] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.944s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.200953] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788818, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.257473] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a1f585-f3e4-0e7c-f2c6-321d2dce278a, 'name': SearchDatastore_Task, 'duration_secs': 0.01072} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.257810] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.258147] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 2583e2ba-8904-420c-a417-d6af71bfa9ac/2583e2ba-8904-420c-a417-d6af71bfa9ac.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1707.258429] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a61fb51e-118c-4fe4-ac72-9026b99a94b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.266825] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1707.266825] env[62816]: value = "task-1788819" [ 1707.266825] env[62816]: _type = "Task" [ 1707.266825] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.275343] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788819, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.701225] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788818, 'name': ReconfigVM_Task, 'duration_secs': 0.711324} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.702611] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 9ab4e631-5b31-4b37-9b49-4f0423286752/9ab4e631-5b31-4b37-9b49-4f0423286752.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1707.702611] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1037f653-d2fe-4a7a-85b2-38e48a4fe42f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.710606] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1707.710606] env[62816]: value = "task-1788820" [ 1707.710606] env[62816]: _type = "Task" [ 1707.710606] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.719958] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788820, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.754525] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a356e8ab-47c5-45c2-af4f-03aa1e14056e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.764276] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea57b05-4ac0-429f-9dd7-ebe3a7c76968 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.811307] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5596a91a-edbb-440b-9fa9-29077777264e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.814941] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788819, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.824328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa71a814-83c8-4ed1-abb7-208d4e658f52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.842344] env[62816]: DEBUG nova.compute.provider_tree [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.226406] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788820, 'name': Rename_Task, 'duration_secs': 0.218261} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.227063] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1708.227271] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d324f78-bbfe-4076-b819-67947bd6191e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.235499] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1708.235499] env[62816]: value = "task-1788821" [ 1708.235499] env[62816]: _type = "Task" [ 1708.235499] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.250947] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788821, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.278890] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.773947} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.279562] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 2583e2ba-8904-420c-a417-d6af71bfa9ac/2583e2ba-8904-420c-a417-d6af71bfa9ac.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1708.279720] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1708.280019] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ef21fcf-b5fc-4cab-bf50-c93c85548869 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.289742] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1708.289742] env[62816]: value = "task-1788822" [ 1708.289742] env[62816]: _type = "Task" [ 1708.289742] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.299757] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788822, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.349213] env[62816]: DEBUG nova.scheduler.client.report [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1708.746365] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788821, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.802032] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788822, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074755} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.802032] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1708.802823] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b4f37b-6344-457d-83ba-4512bfe7cc7b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.827201] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 2583e2ba-8904-420c-a417-d6af71bfa9ac/2583e2ba-8904-420c-a417-d6af71bfa9ac.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1708.827637] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2f3752d-3169-479b-bc6e-3f32d06bd963 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.849853] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1708.849853] env[62816]: value = "task-1788823" [ 1708.849853] env[62816]: _type = "Task" [ 1708.849853] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.853963] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.854789] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1708.857187] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.534s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.858717] env[62816]: INFO nova.compute.claims [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1708.867741] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788823, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.248527] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788821, 'name': PowerOnVM_Task, 'duration_secs': 0.702218} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.248876] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1709.249149] env[62816]: INFO nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1709.249376] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1709.250398] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb324fb-96e7-4f9a-899f-2f677fd07d73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.360522] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788823, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.364013] env[62816]: DEBUG nova.compute.utils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1709.368265] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1709.368527] env[62816]: DEBUG nova.network.neutron [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1709.425307] env[62816]: DEBUG nova.policy [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96acb678bd4641f49f24cdc456595705', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12767255c02a4e16ad13383fdb725593', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1709.771109] env[62816]: INFO nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Took 20.50 seconds to build instance. [ 1709.861714] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788823, 'name': ReconfigVM_Task, 'duration_secs': 0.605581} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.862090] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 2583e2ba-8904-420c-a417-d6af71bfa9ac/2583e2ba-8904-420c-a417-d6af71bfa9ac.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.862808] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98256a10-2cff-457a-986b-98cd319baf0e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.873156] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1709.876491] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1709.876491] env[62816]: value = "task-1788824" [ 1709.876491] env[62816]: _type = "Task" [ 1709.876491] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.877517] env[62816]: DEBUG nova.network.neutron [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Successfully created port: 3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1709.894124] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788824, 'name': Rename_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.130422] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96050e11-015c-4b8a-9fb9-33cbf8508518 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.138491] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791e0f19-a26d-4601-8576-10fc5d321121 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.171663] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040e0d0b-52c7-4706-8511-83927cf21a01 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.181866] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692c1f4a-9baa-42bb-a747-a4c7a3c5d8ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.197079] env[62816]: DEBUG nova.compute.provider_tree [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1710.271309] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.009s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.395629] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788824, 'name': Rename_Task, 'duration_secs': 0.27641} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.395982] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1710.396249] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f7197c6-0ea2-4ca3-bcff-b746fcfc8c8d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.405018] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1710.405018] env[62816]: value = "task-1788825" [ 1710.405018] env[62816]: _type = "Task" [ 1710.405018] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.414233] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788825, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.700641] env[62816]: DEBUG nova.scheduler.client.report [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1710.891436] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1710.918575] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788825, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.920953] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1710.921493] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1710.921711] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1710.922089] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1710.922322] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1710.922525] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1710.922783] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1710.922962] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1710.923152] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1710.923314] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1710.923490] env[62816]: DEBUG nova.virt.hardware [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1710.924275] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e165a2b-8d5d-4182-9dc7-45544baee012 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.933745] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fd83cb-888c-43da-88b6-980b85ffc8a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.206029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.348s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.206535] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1711.212801] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.992s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.212801] env[62816]: DEBUG nova.objects.instance [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lazy-loading 'resources' on Instance uuid 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1711.420019] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788825, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.673574] env[62816]: DEBUG nova.compute.manager [req-45c1b4dc-263a-41e0-bfa4-47ba990258fb req-b53213a6-8c05-4416-a6b2-452f35e6aba5 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received event network-vif-plugged-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1711.673815] env[62816]: DEBUG oslo_concurrency.lockutils [req-45c1b4dc-263a-41e0-bfa4-47ba990258fb req-b53213a6-8c05-4416-a6b2-452f35e6aba5 service nova] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.674056] env[62816]: DEBUG oslo_concurrency.lockutils [req-45c1b4dc-263a-41e0-bfa4-47ba990258fb req-b53213a6-8c05-4416-a6b2-452f35e6aba5 service nova] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.674257] env[62816]: DEBUG oslo_concurrency.lockutils [req-45c1b4dc-263a-41e0-bfa4-47ba990258fb req-b53213a6-8c05-4416-a6b2-452f35e6aba5 service nova] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.674442] env[62816]: DEBUG nova.compute.manager [req-45c1b4dc-263a-41e0-bfa4-47ba990258fb req-b53213a6-8c05-4416-a6b2-452f35e6aba5 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] No waiting events found dispatching network-vif-plugged-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1711.674582] env[62816]: WARNING nova.compute.manager [req-45c1b4dc-263a-41e0-bfa4-47ba990258fb req-b53213a6-8c05-4416-a6b2-452f35e6aba5 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received unexpected event network-vif-plugged-3f45a830-39df-4031-a603-7b72a5562ec6 for instance with vm_state building and task_state spawning. [ 1711.716305] env[62816]: DEBUG nova.compute.utils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1711.717984] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1711.718957] env[62816]: DEBUG nova.network.neutron [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1711.801378] env[62816]: DEBUG nova.policy [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27feea2974d047c6b77850d126079bcc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '75028a965cf549cda255a3bc0b000e9a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1711.915112] env[62816]: DEBUG oslo_vmware.api [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788825, 'name': PowerOnVM_Task, 'duration_secs': 1.201226} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.915388] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1711.915592] env[62816]: INFO nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Took 9.10 seconds to spawn the instance on the hypervisor. 
[ 1711.915780] env[62816]: DEBUG nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1711.916568] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cb118a-c97e-4f4d-ab48-b96234b089ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.930354] env[62816]: DEBUG nova.network.neutron [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Successfully updated port: 3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1711.998394] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27015ba-1187-47f9-bd46-715b87ce84b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.009300] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c890f8-8ed0-4408-90c2-5859acae32c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.048635] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8dc877f-d198-4818-8fa4-83cd8f21243a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.057217] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d76e34-f120-4902-9aae-25965ae25164 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.072974] env[62816]: DEBUG nova.compute.provider_tree [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.226883] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1712.326600] env[62816]: DEBUG nova.network.neutron [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Successfully created port: 6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1712.440123] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.440123] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.440123] env[62816]: DEBUG nova.network.neutron [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1712.443466] env[62816]: INFO nova.compute.manager [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Took 23.13 seconds to build instance. 
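The "Acquiring lock" / "acquired" / "released ... held N s" bookkeeping that surrounds the refresh_cache and instance-event entries above comes from oslo.concurrency's lockutils. A short sketch of the two common usage forms follows; the lock name is illustrative, not one Nova itself registers.

```python
# Requires the real oslo.concurrency package (pip install oslo.concurrency).
from oslo_concurrency import lockutils


# Decorator form: the whole function body runs under the named in-process lock.
@lockutils.synchronized("refresh_cache-demo-instance")
def refresh_cache():
    print("refreshing network info cache under the lock")


# Context-manager form: only the wrapped block holds the lock.
def update_cache_entry():
    with lockutils.lock("refresh_cache-demo-instance"):
        print("updating a single cache entry under the lock")


if __name__ == "__main__":
    refresh_cache()
    update_cache_entry()
```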
[ 1712.577490] env[62816]: DEBUG nova.scheduler.client.report [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1712.736895] env[62816]: INFO nova.virt.block_device [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Booting with volume b605cfce-b06c-4615-a606-12cb89b4a2d4 at /dev/sda [ 1712.778294] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ee32160-604e-40e5-9e6c-0edb3b632889 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.788646] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32ee187-1790-4cb4-ae2c-9c13e534db1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.830076] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb3c9754-f9c9-4fee-bf75-8dbd01f66bce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.846284] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49784c7c-f081-4172-bd0e-73ac2754668e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.895465] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7298191b-08ac-4e39-a38e-34d95829aaad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.903289] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b120bc1-873f-4d66-8d7a-baf4f98c9b22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.919527] env[62816]: DEBUG nova.virt.block_device [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updating existing volume attachment record: 76ccc3ca-fd03-4d20-9473-b38035808244 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1712.951664] env[62816]: DEBUG oslo_concurrency.lockutils [None req-389204a5-b786-4e97-8860-3cebb9bf9200 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.652s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.005649] env[62816]: DEBUG nova.network.neutron [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1713.087240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.112159] env[62816]: INFO nova.scheduler.client.report [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Deleted allocations for instance 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb [ 1713.239921] env[62816]: DEBUG nova.network.neutron [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.370225] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.370425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.370756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.370756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.371092] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.373230] env[62816]: INFO nova.compute.manager [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Terminating instance [ 1713.376045] env[62816]: DEBUG nova.compute.manager [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1713.376045] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1713.376045] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a948e02a-a16b-40ad-a241-1b2a9fa9079e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.385181] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1713.385181] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-341dff29-f165-4e8a-98d1-2a2dc3aa01f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.392726] env[62816]: DEBUG oslo_vmware.api [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1713.392726] env[62816]: value = "task-1788826" [ 1713.392726] env[62816]: _type = "Task" [ 1713.392726] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.403413] env[62816]: DEBUG oslo_vmware.api [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788826, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.623651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b25f62d2-2638-4398-88af-fe5124e64c24 tempest-ImagesTestJSON-1533817319 tempest-ImagesTestJSON-1533817319-project-member] Lock "3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.811s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.701131] env[62816]: DEBUG nova.compute.manager [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1713.702091] env[62816]: DEBUG nova.compute.manager [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing instance network info cache due to event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1713.702854] env[62816]: DEBUG oslo_concurrency.lockutils [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.743182] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.746021] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Instance network_info: |[{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1713.746021] env[62816]: DEBUG oslo_concurrency.lockutils [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.746021] env[62816]: DEBUG nova.network.neutron [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1713.746021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:2b:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '3f45a830-39df-4031-a603-7b72a5562ec6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1713.758366] env[62816]: DEBUG oslo.service.loopingcall [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1713.765069] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1713.765069] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed170d2e-fd73-4416-bf7e-03c0fa88de19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.788286] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1713.788286] env[62816]: value = "task-1788827" [ 1713.788286] env[62816]: _type = "Task" [ 1713.788286] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.801389] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788827, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.909896] env[62816]: DEBUG oslo_vmware.api [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788826, 'name': PowerOffVM_Task, 'duration_secs': 0.245511} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.912499] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.912499] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.912499] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91f56c10-3a49-4901-9223-9b9be885286f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.006143] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1714.006143] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1714.006463] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleting the datastore file [datastore1] 679ce8d3-a57c-4620-81bc-ee8deea4bc8e {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1714.007272] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5d2aaead-5fb2-4f67-95fb-6a4eb1e0f948 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.016180] env[62816]: DEBUG oslo_vmware.api [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1714.016180] env[62816]: value = "task-1788829" [ 1714.016180] env[62816]: _type = "Task" [ 1714.016180] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.031165] env[62816]: DEBUG oslo_vmware.api [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.304623] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788827, 'name': CreateVM_Task, 'duration_secs': 0.371326} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.304744] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1714.305442] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.306099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.307461] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1714.307461] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90a437f2-1a88-423d-82b6-3833b1ed1db3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.312722] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1714.312722] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5208b290-46bd-bd9a-364c-0dfbbfcd687e" [ 1714.312722] env[62816]: _type = "Task" [ 1714.312722] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.322697] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5208b290-46bd-bd9a-364c-0dfbbfcd687e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.347164] env[62816]: DEBUG nova.network.neutron [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updated VIF entry in instance network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1714.347605] env[62816]: DEBUG nova.network.neutron [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.530441] env[62816]: DEBUG oslo_vmware.api [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171033} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.530700] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.530897] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.531108] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.531274] env[62816]: INFO nova.compute.manager [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Took 1.16 seconds to destroy the instance on the hypervisor. 
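The instance_info_cache updates above carry the full VIF record as JSON. For reference, here is a small standalone sketch that pulls out the fields most often needed when tracing a port (port id, MAC, devname, fixed IPs, MTU, segmentation id) from a trimmed copy of that record; the trimming is mine, the values are the ones logged above.

```python
import json

# Trimmed copy of the network_info entry from the cache-update log lines above.
network_info = json.loads("""
[{"id": "3f45a830-39df-4031-a603-7b72a5562ec6",
  "address": "fa:16:3e:71:2b:cf",
  "devname": "tap3f45a830-39",
  "type": "ovs",
  "details": {"segmentation_id": 758},
  "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057",
              "bridge": "br-int",
              "meta": {"mtu": 8950},
              "subnets": [{"cidr": "192.168.128.0/28",
                           "gateway": {"address": "192.168.128.1", "version": 4},
                           "ips": [{"address": "192.168.128.10", "version": 4}]}]}}]
""")

# Print the fields an operator usually needs when tracing a VIF.
for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"],
          "fixed_ips=%s" % fixed_ips,
          "mtu=%s" % vif["network"]["meta"]["mtu"],
          "segmentation_id=%s" % vif["details"]["segmentation_id"])
```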
[ 1714.531506] env[62816]: DEBUG oslo.service.loopingcall [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.531685] env[62816]: DEBUG nova.compute.manager [-] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1714.531779] env[62816]: DEBUG nova.network.neutron [-] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1714.574026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.574026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.610906] env[62816]: DEBUG nova.network.neutron [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Successfully updated port: 6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1714.824020] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5208b290-46bd-bd9a-364c-0dfbbfcd687e, 'name': SearchDatastore_Task, 'duration_secs': 0.010765} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.824344] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.824587] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1714.824828] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.826084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.826084] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1714.826084] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f7915519-a1df-4d4d-80c0-101c2c2db4fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.835126] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1714.835126] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1714.836149] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e196815f-5c4e-41a4-8182-504c67b7a6a4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.842693] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1714.842693] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f915d2-1635-b298-973f-a187f3c5b4fd" [ 1714.842693] env[62816]: _type = "Task" [ 1714.842693] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.852936] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f915d2-1635-b298-973f-a187f3c5b4fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.853488] env[62816]: DEBUG oslo_concurrency.lockutils [req-c5c8d1c8-9ec7-41a5-bb57-d31f000bf87a req-06b2635f-eb8d-4bde-9f91-b770f474a4f0 service nova] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.053182] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1715.053730] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1715.053943] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1715.054114] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1715.054301] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1715.054449] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1715.054596] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1715.054800] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1715.054958] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1715.055924] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 
tempest-ServerActionsV293TestJSON-1229099144-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1715.056145] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1715.056331] env[62816]: DEBUG nova.virt.hardware [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1715.057701] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d4d2fd-7850-404d-a7ed-63fa7b1e43ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.071475] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9159f5-b855-4666-9cb4-978e1b363f3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.083775] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1715.114328] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.114646] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquired lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.114901] env[62816]: DEBUG nova.network.neutron [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1715.240272] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "915127f6-2da7-4eab-a7cb-331a41d04d0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.240488] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 
tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.280304] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.280304] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.355043] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f915d2-1635-b298-973f-a187f3c5b4fd, 'name': SearchDatastore_Task, 'duration_secs': 0.010427} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.355967] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9eefa5c7-f4a6-4e6e-bb15-f596c77d6258 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.364589] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1715.364589] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5247d1b9-8a93-9088-67c5-cfc6202119f0" [ 1715.364589] env[62816]: _type = "Task" [ 1715.364589] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.380358] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5247d1b9-8a93-9088-67c5-cfc6202119f0, 'name': SearchDatastore_Task, 'duration_secs': 0.011438} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.380358] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.380358] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/b9e8af08-9579-4dbf-8ea1-35ffab75e159.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1715.380766] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-974a595d-4f95-487c-bed2-ae6a40fe62e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.389158] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1715.389158] env[62816]: value = "task-1788830" [ 1715.389158] env[62816]: _type = "Task" [ 1715.389158] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.397983] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788830, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.426827] env[62816]: DEBUG nova.network.neutron [-] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.610827] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.611636] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.612833] env[62816]: INFO nova.compute.claims [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1715.670441] env[62816]: DEBUG nova.network.neutron [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1715.743437] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1715.748988] env[62816]: DEBUG nova.compute.manager [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Received event network-vif-plugged-6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1715.749802] env[62816]: DEBUG oslo_concurrency.lockutils [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] Acquiring lock "9972b167-a950-4dba-ac02-068f66300053-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.750126] env[62816]: DEBUG oslo_concurrency.lockutils [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] Lock "9972b167-a950-4dba-ac02-068f66300053-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.750364] env[62816]: DEBUG oslo_concurrency.lockutils [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] Lock "9972b167-a950-4dba-ac02-068f66300053-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.751027] env[62816]: DEBUG nova.compute.manager [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] No waiting events found dispatching network-vif-plugged-6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1715.755016] env[62816]: WARNING nova.compute.manager [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Received unexpected event network-vif-plugged-6aab8d5d-a76e-4738-8cab-9e6b59a195f0 for instance with vm_state building and task_state spawning. [ 1715.755016] env[62816]: DEBUG nova.compute.manager [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Received event network-changed-6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1715.755016] env[62816]: DEBUG nova.compute.manager [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Refreshing instance network info cache due to event network-changed-6aab8d5d-a76e-4738-8cab-9e6b59a195f0. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1715.755016] env[62816]: DEBUG oslo_concurrency.lockutils [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] Acquiring lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.783122] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1715.885257] env[62816]: DEBUG nova.network.neutron [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updating instance_info_cache with network_info: [{"id": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "address": "fa:16:3e:05:4a:15", "network": {"id": "28706faa-7199-41a4-996b-ae62937de3e1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-432903475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75028a965cf549cda255a3bc0b000e9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aab8d5d-a7", "ovs_interfaceid": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.899666] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490129} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.900628] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/b9e8af08-9579-4dbf-8ea1-35ffab75e159.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1715.900869] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1715.901196] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fa08e57-e538-4a65-be35-3a1cff6bc25d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.909694] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1715.909694] env[62816]: value = "task-1788831" [ 1715.909694] env[62816]: _type = "Task" [ 1715.909694] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.921218] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788831, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.929643] env[62816]: INFO nova.compute.manager [-] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Took 1.40 seconds to deallocate network for instance. 
[ 1716.277672] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.304439] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.386390] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Releasing lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.386842] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance network_info: |[{"id": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "address": "fa:16:3e:05:4a:15", "network": {"id": "28706faa-7199-41a4-996b-ae62937de3e1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-432903475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75028a965cf549cda255a3bc0b000e9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aab8d5d-a7", "ovs_interfaceid": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1716.387117] env[62816]: DEBUG oslo_concurrency.lockutils [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] Acquired lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.387260] env[62816]: DEBUG nova.network.neutron [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Refreshing network info cache for port 6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1716.389046] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:4a:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1e7173e-4163-4212-9339-aea3eddd359e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6aab8d5d-a76e-4738-8cab-9e6b59a195f0', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1716.397084] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Creating folder: Project (75028a965cf549cda255a3bc0b000e9a). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1716.398325] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8864dac7-41f1-48d1-a7f5-8ca97ff63987 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.417027] env[62816]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1716.417027] env[62816]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62816) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1716.417027] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Folder already exists: Project (75028a965cf549cda255a3bc0b000e9a). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1716.417027] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Creating folder: Instances. Parent ref: group-v371110. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1716.417540] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f212d48-b95d-4bad-9081-e513966100a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.422847] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788831, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16275} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.423262] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1716.424612] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b943b9-caa9-4c47-93e2-49a1a2e73048 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.431265] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Created folder: Instances in parent group-v371110. [ 1716.431265] env[62816]: DEBUG oslo.service.loopingcall [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.431265] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9972b167-a950-4dba-ac02-068f66300053] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1716.431265] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd25019b-8a1a-4d58-ae55-057cafe7c2a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.456211] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.464863] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/b9e8af08-9579-4dbf-8ea1-35ffab75e159.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1716.465635] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f419edf-8067-46d0-b54e-e461c4543d87 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.480984] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1716.480984] env[62816]: value = "task-1788834" [ 1716.480984] env[62816]: _type = "Task" [ 1716.480984] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.487618] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1716.487618] env[62816]: value = "task-1788835" [ 1716.487618] env[62816]: _type = "Task" [ 1716.487618] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.491368] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788834, 'name': CreateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.500968] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788835, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.878917] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720f8aa2-4fa0-415f-a98f-04c31e50f7a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.887082] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c3a7392-ce3b-45bf-8f42-eef63ec13bed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.919781] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61196854-a24a-4dfe-8c73-be252a9083a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.929264] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a0a126-2cbf-4a87-9a10-8ff709750a26 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.944253] env[62816]: DEBUG nova.compute.provider_tree [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.994201] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788834, 'name': CreateVM_Task, 'duration_secs': 0.413343} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.997151] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9972b167-a950-4dba-ac02-068f66300053] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1716.997883] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371117', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'name': 'volume-b605cfce-b06c-4615-a606-12cb89b4a2d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9972b167-a950-4dba-ac02-068f66300053', 'attached_at': '', 'detached_at': '', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'serial': 'b605cfce-b06c-4615-a606-12cb89b4a2d4'}, 'disk_bus': None, 'delete_on_termination': True, 'attachment_id': '76ccc3ca-fd03-4d20-9473-b38035808244', 'volume_type': None}], 'swap': None} {{(pid=62816) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1716.998125] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Root volume attach. Driver type: vmdk {{(pid=62816) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1716.999228] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc4b9eb-9cf4-4c00-a583-9619224cfafd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.004449] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788835, 'name': ReconfigVM_Task, 'duration_secs': 0.31552} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.004964] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfigured VM instance instance-0000004e to attach disk [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/b9e8af08-9579-4dbf-8ea1-35ffab75e159.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1717.005558] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fefc432e-2ff9-4c14-96ca-f6a81435c3fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.010206] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7ff41d-9312-4ba9-85cc-fb337f87860a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.015023] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1717.015023] env[62816]: value = "task-1788836" [ 1717.015023] env[62816]: _type = "Task" [ 1717.015023] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.019057] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05da8ae4-5dd0-4242-85b6-f0a1fe016731 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.026466] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788836, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.028524] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-9211ceb4-a938-47f2-b59d-0db1e862a52d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.035249] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1717.035249] env[62816]: value = "task-1788837" [ 1717.035249] env[62816]: _type = "Task" [ 1717.035249] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.045449] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788837, 'name': RelocateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.205324] env[62816]: DEBUG nova.network.neutron [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updated VIF entry in instance network info cache for port 6aab8d5d-a76e-4738-8cab-9e6b59a195f0. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1717.205807] env[62816]: DEBUG nova.network.neutron [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updating instance_info_cache with network_info: [{"id": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "address": "fa:16:3e:05:4a:15", "network": {"id": "28706faa-7199-41a4-996b-ae62937de3e1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-432903475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75028a965cf549cda255a3bc0b000e9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aab8d5d-a7", "ovs_interfaceid": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.447444] env[62816]: DEBUG nova.scheduler.client.report [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1717.528024] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788836, 'name': Rename_Task, 'duration_secs': 0.148091} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.528024] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1717.528024] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c262173-f255-4722-9c8b-40593de2041f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.535494] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1717.535494] env[62816]: value = "task-1788838" [ 1717.535494] env[62816]: _type = "Task" [ 1717.535494] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.548279] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.551726] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788837, 'name': RelocateVM_Task, 'duration_secs': 0.399022} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.551726] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1717.551876] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371117', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'name': 'volume-b605cfce-b06c-4615-a606-12cb89b4a2d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9972b167-a950-4dba-ac02-068f66300053', 'attached_at': '', 'detached_at': '', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'serial': 'b605cfce-b06c-4615-a606-12cb89b4a2d4'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1717.552735] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20a8b50-0eb8-4d50-9a1a-314de23a75ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.570979] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f35da4d-c9fc-4c31-96d1-856e48c1d3ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.594947] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] volume-b605cfce-b06c-4615-a606-12cb89b4a2d4/volume-b605cfce-b06c-4615-a606-12cb89b4a2d4.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1717.595304] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1ff049c-b5a0-4e1d-93c6-0c0b435952bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.617532] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1717.617532] env[62816]: value = "task-1788839" [ 1717.617532] env[62816]: _type = "Task" [ 1717.617532] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.625767] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788839, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.708753] env[62816]: DEBUG oslo_concurrency.lockutils [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] Releasing lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.709185] env[62816]: DEBUG nova.compute.manager [req-b138a722-b0db-4863-b4a1-bec51016309c req-8d68bab0-520d-47f8-a054-ddd33a925e3b service nova] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Received event network-vif-deleted-a22f6d4b-1226-4202-95b7-f25e7108759f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1717.953796] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.954401] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1717.961025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.680s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.961025] env[62816]: INFO nova.compute.claims [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1718.046540] env[62816]: DEBUG oslo_vmware.api [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788838, 'name': PowerOnVM_Task, 'duration_secs': 0.465596} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.046836] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1718.047055] env[62816]: INFO nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Took 7.16 seconds to spawn the instance on the hypervisor. 
[ 1718.047245] env[62816]: DEBUG nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1718.048196] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe91eb2b-4603-4602-abb0-f5c8acaf5b2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.132682] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788839, 'name': ReconfigVM_Task, 'duration_secs': 0.257166} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.132940] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Reconfigured VM instance instance-0000004f to attach disk [datastore1] volume-b605cfce-b06c-4615-a606-12cb89b4a2d4/volume-b605cfce-b06c-4615-a606-12cb89b4a2d4.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1718.139333] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-582e5b68-ee9b-4b03-a64a-953c236a11e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.157157] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1718.157157] env[62816]: value = "task-1788840" [ 1718.157157] env[62816]: _type = "Task" [ 1718.157157] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.171426] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788840, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.464105] env[62816]: DEBUG nova.compute.utils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1718.469027] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1718.469027] env[62816]: DEBUG nova.network.neutron [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1718.534192] env[62816]: DEBUG nova.policy [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bfce7acecae4c45b59ae478da8c6a67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '138797faa4144ecbad6956e126963199', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1718.571058] env[62816]: INFO nova.compute.manager [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Took 15.42 seconds to build instance. [ 1718.670903] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788840, 'name': ReconfigVM_Task, 'duration_secs': 0.152002} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.671301] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371117', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'name': 'volume-b605cfce-b06c-4615-a606-12cb89b4a2d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9972b167-a950-4dba-ac02-068f66300053', 'attached_at': '', 'detached_at': '', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'serial': 'b605cfce-b06c-4615-a606-12cb89b4a2d4'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1718.671864] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-185ae5d6-7d74-472a-b5b9-f01e72d141ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.680623] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1718.680623] env[62816]: value = "task-1788841" [ 1718.680623] env[62816]: _type = "Task" [ 1718.680623] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.690979] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788841, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.968420] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1719.010998] env[62816]: DEBUG nova.network.neutron [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Successfully created port: d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1719.073449] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2cfdcd94-21aa-4a51-add3-9e3d3641c58f tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.926s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1719.193237] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788841, 'name': Rename_Task, 'duration_secs': 0.163648} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.193540] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1719.194274] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7939e240-74d3-487e-8497-329e87851f5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.203136] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1719.203136] env[62816]: value = "task-1788842" [ 1719.203136] env[62816]: _type = "Task" [ 1719.203136] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.217363] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788842, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.293079] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.293079] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.297017] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5cd825-e1c3-40ba-93f9-caa160f89327 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.309128] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6cf3c1-ed5a-4dfa-84c4-3fb387d129d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.345249] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9afe1f1-d3fc-48e4-9e6b-0c6d4d7a8837 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.355693] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea90884-5032-45d1-9b59-5fb43cf11cfc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.377523] env[62816]: DEBUG nova.compute.provider_tree [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1719.398577] env[62816]: DEBUG nova.compute.manager [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1719.398724] env[62816]: DEBUG nova.compute.manager [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing instance network info cache due to event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1719.398921] env[62816]: DEBUG oslo_concurrency.lockutils [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.399101] env[62816]: DEBUG oslo_concurrency.lockutils [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.399268] env[62816]: DEBUG nova.network.neutron [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1719.713307] env[62816]: DEBUG oslo_vmware.api [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788842, 'name': PowerOnVM_Task, 'duration_secs': 0.494591} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.713675] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1719.713801] env[62816]: INFO nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Took 4.66 seconds to spawn the instance on the hypervisor. 
[ 1719.713979] env[62816]: DEBUG nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1719.714787] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a4fe5e-fb58-4d81-90e7-8714e39b15ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.801670] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1719.801670] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1719.882709] env[62816]: DEBUG nova.scheduler.client.report [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1719.985519] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1720.014060] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1720.014060] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1720.014060] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1720.014060] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1720.014060] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1720.014654] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1720.015069] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1720.015412] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1720.018016] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1720.018016] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1720.018016] env[62816]: DEBUG nova.virt.hardware [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1720.018016] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f7e5c5-3dea-4e4b-a483-268ed5bc6ebe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.026465] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d278e469-cbd4-4e03-9131-374bd384c2f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.082780] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "74c15238-221c-4d1c-8577-4046d5666e45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.082780] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "74c15238-221c-4d1c-8577-4046d5666e45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.160453] env[62816]: DEBUG nova.network.neutron [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updated VIF entry in instance network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1720.160973] env[62816]: DEBUG nova.network.neutron [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.237996] env[62816]: INFO nova.compute.manager [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Took 16.93 seconds to build instance. [ 1720.387706] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.388553] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1720.392166] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.088s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.393951] env[62816]: INFO nova.compute.claims [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1720.584523] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1720.664550] env[62816]: DEBUG oslo_concurrency.lockutils [req-4c003f0f-465b-464a-a165-686a8090df0e req-41398c48-f9f7-49f1-bc9c-473a36b3fdcc service nova] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.692595] env[62816]: DEBUG nova.compute.manager [req-0739b44f-27d2-4064-83ec-cf23a52849d0 req-08add21f-1dac-4601-ab30-ea0e79b1dd79 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-vif-plugged-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1720.692945] env[62816]: DEBUG oslo_concurrency.lockutils [req-0739b44f-27d2-4064-83ec-cf23a52849d0 req-08add21f-1dac-4601-ab30-ea0e79b1dd79 service nova] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.693190] env[62816]: DEBUG oslo_concurrency.lockutils [req-0739b44f-27d2-4064-83ec-cf23a52849d0 req-08add21f-1dac-4601-ab30-ea0e79b1dd79 service nova] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.693440] env[62816]: DEBUG oslo_concurrency.lockutils [req-0739b44f-27d2-4064-83ec-cf23a52849d0 req-08add21f-1dac-4601-ab30-ea0e79b1dd79 service nova] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.693640] env[62816]: DEBUG nova.compute.manager [req-0739b44f-27d2-4064-83ec-cf23a52849d0 req-08add21f-1dac-4601-ab30-ea0e79b1dd79 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] No waiting events found dispatching network-vif-plugged-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1720.693853] env[62816]: WARNING nova.compute.manager 
[req-0739b44f-27d2-4064-83ec-cf23a52849d0 req-08add21f-1dac-4601-ab30-ea0e79b1dd79 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received unexpected event network-vif-plugged-d0353b95-1d3d-4eab-9c03-374679fe2118 for instance with vm_state building and task_state spawning. [ 1720.740408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b3103df1-cf86-4fa7-9eae-674311c7dd12 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "9972b167-a950-4dba-ac02-068f66300053" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.447s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.783691] env[62816]: DEBUG nova.network.neutron [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Successfully updated port: d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1720.899475] env[62816]: DEBUG nova.compute.utils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1720.906862] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1720.907061] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1720.956947] env[62816]: DEBUG nova.policy [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0fe4b6013bd4f5dac5bccdbe4683b39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f53e13df2c6740cd9666b8d60fdbfd87', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1721.109211] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.234992] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 
tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Successfully created port: bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1721.287772] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.287996] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.288195] env[62816]: DEBUG nova.network.neutron [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.408015] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1721.433026] env[62816]: DEBUG nova.compute.manager [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Received event network-changed-6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1721.433026] env[62816]: DEBUG nova.compute.manager [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Refreshing instance network info cache due to event network-changed-6aab8d5d-a76e-4738-8cab-9e6b59a195f0. 
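The network-vif-plugged / network-changed traffic above follows a register-then-wait pattern: the compute manager records which external events it expects for an instance, and Neutron-originated notifications either complete a waiter or, as in the WARNING above, arrive while nothing is registered (the instance is still building/spawning). A rough stdlib-only sketch of that pattern, not Nova's InstanceEvents implementation; all names below are invented for the example.

    # Illustrative register-then-wait event dispatch, loosely mirroring the
    # "No waiting events found dispatching ..." / "Received unexpected event ..."
    # lines above. Pure standard library.
    import threading

    class InstanceEvents:
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}   # (instance_uuid, event_name) -> threading.Event

        def prepare_for_event(self, instance_uuid, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_uuid, event_name)] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                return self._waiters.pop((instance_uuid, event_name), None)

    events = InstanceEvents()
    waiter = events.pop_instance_event("f97ea34e", "network-vif-plugged")
    if waiter is None:
        print("Received unexpected event: nothing was waiting for it")
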
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1721.433026] env[62816]: DEBUG oslo_concurrency.lockutils [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] Acquiring lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.433026] env[62816]: DEBUG oslo_concurrency.lockutils [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] Acquired lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.433026] env[62816]: DEBUG nova.network.neutron [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Refreshing network info cache for port 6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1721.734106] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24193d51-1e1d-4ccb-8672-8f4b3bc0eee7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.743068] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a167d6a2-a522-4245-9984-ef34b28a38cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.773283] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2098a1fc-4aa2-4ac4-ab9d-6b25fdf0db53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.780559] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71997ea-4670-49cd-aa60-8ab742c26a17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.795655] env[62816]: DEBUG nova.compute.provider_tree [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.824078] env[62816]: DEBUG nova.network.neutron [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1721.954064] env[62816]: DEBUG nova.network.neutron [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.153577] env[62816]: DEBUG nova.network.neutron [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updated VIF entry in instance network info cache for port 6aab8d5d-a76e-4738-8cab-9e6b59a195f0. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1722.153967] env[62816]: DEBUG nova.network.neutron [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updating instance_info_cache with network_info: [{"id": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "address": "fa:16:3e:05:4a:15", "network": {"id": "28706faa-7199-41a4-996b-ae62937de3e1", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-432903475-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.179", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "75028a965cf549cda255a3bc0b000e9a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1e7173e-4163-4212-9339-aea3eddd359e", "external-id": "nsx-vlan-transportzone-525", "segmentation_id": 525, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aab8d5d-a7", "ovs_interfaceid": "6aab8d5d-a76e-4738-8cab-9e6b59a195f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.299519] env[62816]: DEBUG nova.scheduler.client.report [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.420703] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Start spawning the instance on the hypervisor. 
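The inventory dict reported above for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa bounds what can be claimed on this node: usable capacity per resource class works out to (total - reserved) * allocation_ratio. A quick arithmetic check against the logged numbers (this is local arithmetic only, not a Placement API call):

    # Capacity implied by the logged inventory: (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
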
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1722.448075] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1722.448364] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1722.448547] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1722.448734] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1722.448979] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1722.449219] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1722.449455] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1722.449638] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1722.449838] env[62816]: DEBUG 
nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1722.450032] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1722.450250] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1722.451200] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f502e9-8495-4648-8e68-4bb4ee743429 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.459721] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.460277] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance network_info: |[{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1722.460577] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:e4:26', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0353b95-1d3d-4eab-9c03-374679fe2118', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1722.467744] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating folder: Project (138797faa4144ecbad6956e126963199). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1722.468959] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f9f1cd-70f1-43ee-b327-5c96bb98c916 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.473302] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e551996-4f7e-40cc-9705-62608113cec0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.489768] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created folder: Project (138797faa4144ecbad6956e126963199) in parent group-v370905. [ 1722.490095] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating folder: Instances. Parent ref: group-v371131. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1722.490366] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05dac2f0-10e6-494c-9435-63d40b3f6c32 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.501784] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created folder: Instances in parent group-v371131. [ 1722.502096] env[62816]: DEBUG oslo.service.loopingcall [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1722.502344] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1722.502722] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e214482-7892-495f-99b8-3ffba6ff691d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.524479] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1722.524479] env[62816]: value = "task-1788845" [ 1722.524479] env[62816]: _type = "Task" [ 1722.524479] env[62816]: } to complete. 
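The CreateVM_Task wait above, and the progress/completion lines that follow, are the oslo.vmware wait_for_task/_poll_task loop: the SOAP call returns a task handle, and the session polls it until it reports success or error, logging progress along the way. A generic polling loop in the same spirit (an illustration only, not oslo.vmware's implementation; fetch_task_info is a hypothetical callable):

    # Minimal polling loop illustrating the wait_for_task/_poll_task trace.
    # fetch_task_info is a made-up callable returning (state, progress_percent).
    import time

    def wait_for_task(fetch_task_info, interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            state, progress = fetch_task_info()
            print("progress is %d%%" % progress)
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("task failed")
            time.sleep(interval)
        raise TimeoutError("task did not complete within %ss" % timeout)
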
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.533253] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788845, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.656805] env[62816]: DEBUG oslo_concurrency.lockutils [req-fac9408f-12d3-4fc9-a349-c21826fa898a req-beb1a633-5d83-4f9e-a527-ddec533228c7 service nova] Releasing lock "refresh_cache-9972b167-a950-4dba-ac02-068f66300053" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.724025] env[62816]: DEBUG nova.compute.manager [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1722.724226] env[62816]: DEBUG nova.compute.manager [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing instance network info cache due to event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1722.724451] env[62816]: DEBUG oslo_concurrency.lockutils [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.724596] env[62816]: DEBUG oslo_concurrency.lockutils [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.724757] env[62816]: DEBUG nova.network.neutron [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1722.769036] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Successfully updated port: bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1722.804789] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.413s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.805648] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1722.808495] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.352s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.808767] env[62816]: DEBUG nova.objects.instance [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lazy-loading 'resources' on Instance uuid 679ce8d3-a57c-4620-81bc-ee8deea4bc8e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1723.035277] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788845, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.271313] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "refresh_cache-915127f6-2da7-4eab-a7cb-331a41d04d0e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.271313] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "refresh_cache-915127f6-2da7-4eab-a7cb-331a41d04d0e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.271523] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1723.311619] env[62816]: DEBUG nova.compute.utils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.315618] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Allocating IP information in the background. 
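All of the "Acquiring lock" / "acquired ... waited" / '"released" ... held' pairs in this trace come from oslo.concurrency's lockutils, which times how long a caller waited for and then held a named lock. Roughly how that looks at the call site (the lock names below are examples chosen to echo the trace, not a claim about Nova's exact decorators):

    # Example use of oslo.concurrency's lockutils, the module emitting the
    # acquire/release timing lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim():
        # only one thread in this process runs this body at a time
        pass

    def refresh_cache(instance_uuid):
        # context-manager form, as used for the per-instance cache locks
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass
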
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1723.315924] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1723.360192] env[62816]: DEBUG nova.policy [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0fe4b6013bd4f5dac5bccdbe4683b39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f53e13df2c6740cd9666b8d60fdbfd87', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1723.447778] env[62816]: DEBUG nova.network.neutron [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updated VIF entry in instance network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1723.447950] env[62816]: DEBUG nova.network.neutron [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.537821] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788845, 'name': CreateVM_Task, 'duration_secs': 0.891553} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.537996] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1723.538688] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.538856] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.539202] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1723.539449] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-129136c6-7f0c-4f88-ae5c-bdc38e0f726d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.548026] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1723.548026] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52aea693-7d0a-dc47-a9fa-4be8d85cbf19" [ 1723.548026] env[62816]: _type = "Task" [ 1723.548026] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.558420] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52aea693-7d0a-dc47-a9fa-4be8d85cbf19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.586884] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089efc7b-7571-4ed3-b647-f5a5aeee7680 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.594954] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96788e40-02fe-427e-a64d-a4ebe6d76e35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.625144] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Successfully created port: c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1723.627354] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0706e2b-fda2-4626-b818-3d3ad529f7a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.634743] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d780cb-d7ac-4817-a040-49616ad415b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.647816] env[62816]: DEBUG nova.compute.provider_tree [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.803170] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1723.816436] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1723.823067] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1723.823274] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.823431] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.823582] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.823731] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.823915] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.824021] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.824152] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
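The run of "Running periodic task ComputeManager._*" lines above is the oslo.service periodic-task runner walking the manager's registered tasks on its timer tick (and skipping ones whose config disables them, like _reclaim_queued_deletes here). A bare-bones sketch of how such tasks are declared with oslo.service; the class, method name, and spacing value are illustrative, not Nova's configuration:

    # Sketch of oslo.service periodic tasks, as seen in the
    # "Running periodic task ComputeManager._..." lines above.
    from oslo_config import cfg
    from oslo_service import periodic_task

    CONF = cfg.CONF

    class ExampleManager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(CONF)

        @periodic_task.periodic_task(spacing=60)
        def _poll_unconfirmed_resizes(self, context):
            # body runs roughly every `spacing` seconds when the runner ticks
            pass

    mgr = ExampleManager()
    mgr.run_periodic_tasks(context=None)
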
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1723.824302] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1723.941691] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Updating instance_info_cache with network_info: [{"id": "bea401ef-e47d-48d5-9f02-5b82f1830a7e", "address": "fa:16:3e:f0:b6:5d", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea401ef-e4", "ovs_interfaceid": "bea401ef-e47d-48d5-9f02-5b82f1830a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.950729] env[62816]: DEBUG oslo_concurrency.lockutils [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.950789] env[62816]: DEBUG nova.compute.manager [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Received event network-vif-plugged-bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1723.950981] env[62816]: DEBUG oslo_concurrency.lockutils [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] Acquiring lock "915127f6-2da7-4eab-a7cb-331a41d04d0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.951218] env[62816]: DEBUG oslo_concurrency.lockutils [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.951389] env[62816]: DEBUG oslo_concurrency.lockutils 
[req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.951559] env[62816]: DEBUG nova.compute.manager [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] No waiting events found dispatching network-vif-plugged-bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1723.951726] env[62816]: WARNING nova.compute.manager [req-86e9ae8d-8d00-4844-88df-dd898ff291d8 req-2f3c3cba-ed7a-46e7-a338-4d97f221c683 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Received unexpected event network-vif-plugged-bea401ef-e47d-48d5-9f02-5b82f1830a7e for instance with vm_state building and task_state spawning. [ 1724.060351] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52aea693-7d0a-dc47-a9fa-4be8d85cbf19, 'name': SearchDatastore_Task, 'duration_secs': 0.014464} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.060351] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.060617] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1724.060858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.061016] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.061209] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1724.061469] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b57eee6-1cca-4e6e-8fc1-b5e903fff45e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.070182] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1724.070365] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1724.071064] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592da6f8-1d7c-44cc-9d78-31a2d1b13bde {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.075879] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1724.075879] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f8d7ba-79f9-40c5-c325-9514f1f80409" [ 1724.075879] env[62816]: _type = "Task" [ 1724.075879] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.083256] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f8d7ba-79f9-40c5-c325-9514f1f80409, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.150904] env[62816]: DEBUG nova.scheduler.client.report [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1724.326970] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.445495] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "refresh_cache-915127f6-2da7-4eab-a7cb-331a41d04d0e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.445791] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Instance network_info: |[{"id": "bea401ef-e47d-48d5-9f02-5b82f1830a7e", "address": "fa:16:3e:f0:b6:5d", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea401ef-e4", "ovs_interfaceid": "bea401ef-e47d-48d5-9f02-5b82f1830a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1724.446336] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:b6:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bea401ef-e47d-48d5-9f02-5b82f1830a7e', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1724.454069] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Creating folder: Project (f53e13df2c6740cd9666b8d60fdbfd87). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1724.454367] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38bd6f1f-fb4f-4148-b7b9-f06da4f2f004 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.467791] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Created folder: Project (f53e13df2c6740cd9666b8d60fdbfd87) in parent group-v370905. [ 1724.467791] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Creating folder: Instances. Parent ref: group-v371134. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1724.468080] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-43a19e62-3e06-4559-8830-abbf9ba59716 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.478611] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Created folder: Instances in parent group-v371134. [ 1724.478881] env[62816]: DEBUG oslo.service.loopingcall [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.479089] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1724.479292] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77c1d4e2-0c3f-422f-8780-b660041313b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.497880] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1724.497880] env[62816]: value = "task-1788848" [ 1724.497880] env[62816]: _type = "Task" [ 1724.497880] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.505646] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788848, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.585668] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f8d7ba-79f9-40c5-c325-9514f1f80409, 'name': SearchDatastore_Task, 'duration_secs': 0.014071} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.586504] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1a5307a-ca0a-495c-8ab0-dd294fb63f90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.591668] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1724.591668] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5297d2b6-198e-5206-997c-8055259f010a" [ 1724.591668] env[62816]: _type = "Task" [ 1724.591668] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.599168] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5297d2b6-198e-5206-997c-8055259f010a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.656915] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.848s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.659349] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.550s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.660828] env[62816]: INFO nova.compute.claims [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1724.676690] env[62816]: INFO nova.scheduler.client.report [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleted allocations for instance 679ce8d3-a57c-4620-81bc-ee8deea4bc8e [ 1724.749923] env[62816]: DEBUG nova.compute.manager [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Received event 
network-changed-bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1724.750146] env[62816]: DEBUG nova.compute.manager [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Refreshing instance network info cache due to event network-changed-bea401ef-e47d-48d5-9f02-5b82f1830a7e. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1724.750365] env[62816]: DEBUG oslo_concurrency.lockutils [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] Acquiring lock "refresh_cache-915127f6-2da7-4eab-a7cb-331a41d04d0e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.750508] env[62816]: DEBUG oslo_concurrency.lockutils [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] Acquired lock "refresh_cache-915127f6-2da7-4eab-a7cb-331a41d04d0e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.750666] env[62816]: DEBUG nova.network.neutron [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Refreshing network info cache for port bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1724.828918] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1724.854703] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1724.855011] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1724.855121] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1724.855302] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1724.855450] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1724.855596] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1724.855804] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1724.856020] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1724.856349] env[62816]: DEBUG 
nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1724.856428] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1724.856553] env[62816]: DEBUG nova.virt.hardware [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1724.857425] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adfada4-b0fc-4f53-8296-afa4c847e5ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.865345] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d47c9bbf-0d0d-4ccf-8799-fb996cb8ac0e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.011629] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788848, 'name': CreateVM_Task, 'duration_secs': 0.308304} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.012690] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1725.013854] env[62816]: DEBUG nova.compute.manager [req-022d9716-6c5f-40a9-9a00-be7f83982964 req-71c16ad4-76b5-4227-b5fb-6991615e05c0 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Received event network-vif-plugged-c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1725.014122] env[62816]: DEBUG oslo_concurrency.lockutils [req-022d9716-6c5f-40a9-9a00-be7f83982964 req-71c16ad4-76b5-4227-b5fb-6991615e05c0 service nova] Acquiring lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.014285] env[62816]: DEBUG oslo_concurrency.lockutils [req-022d9716-6c5f-40a9-9a00-be7f83982964 req-71c16ad4-76b5-4227-b5fb-6991615e05c0 service nova] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.014410] env[62816]: DEBUG oslo_concurrency.lockutils [req-022d9716-6c5f-40a9-9a00-be7f83982964 req-71c16ad4-76b5-4227-b5fb-6991615e05c0 service nova] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.014566] env[62816]: DEBUG nova.compute.manager [req-022d9716-6c5f-40a9-9a00-be7f83982964 req-71c16ad4-76b5-4227-b5fb-6991615e05c0 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] No waiting events found dispatching network-vif-plugged-c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1725.014726] env[62816]: WARNING nova.compute.manager [req-022d9716-6c5f-40a9-9a00-be7f83982964 req-71c16ad4-76b5-4227-b5fb-6991615e05c0 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Received unexpected event network-vif-plugged-c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 for instance with vm_state building and task_state spawning. [ 1725.015400] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.015550] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.015858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1725.016356] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aac9fd7-c1ff-48b6-a380-084b5ea02624 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.021513] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1725.021513] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5233e8a3-5b2d-042d-aecb-196b4053d1d4" [ 1725.021513] env[62816]: _type = "Task" [ 1725.021513] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.031112] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5233e8a3-5b2d-042d-aecb-196b4053d1d4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.102555] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5297d2b6-198e-5206-997c-8055259f010a, 'name': SearchDatastore_Task, 'duration_secs': 0.00847} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.102911] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.103323] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1725.103689] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5e69145-e2e0-4ad7-90f6-3ff99e2c8316 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.106756] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Successfully updated port: c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1725.113356] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1725.113356] env[62816]: value = "task-1788849" [ 1725.113356] env[62816]: _type = "Task" [ 1725.113356] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.122138] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788849, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.183924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5c89b7ad-ad0b-4f7a-8d5a-1d8c31968d8c tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "679ce8d3-a57c-4620-81bc-ee8deea4bc8e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.813s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.533887] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5233e8a3-5b2d-042d-aecb-196b4053d1d4, 'name': SearchDatastore_Task, 'duration_secs': 0.00928} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.534628] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.534885] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1725.535142] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.535503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.535503] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1725.535787] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-768c9ca9-8871-464b-bb51-2d1cdb1baa8b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.550368] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 
tempest-MultipleCreateTestJSON-1865147490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1725.550642] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1725.551474] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3e8ef88-b1da-4b64-b9e1-3c1b8cd9f119 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.558304] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1725.558304] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521935b3-e483-3d52-62dd-81be9ec34c6d" [ 1725.558304] env[62816]: _type = "Task" [ 1725.558304] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.572038] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521935b3-e483-3d52-62dd-81be9ec34c6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.615480] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "refresh_cache-0acc334c-e400-4b28-8ee7-8d6cafb057e9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.615480] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "refresh_cache-0acc334c-e400-4b28-8ee7-8d6cafb057e9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.615480] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.630647] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788849, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.745853] env[62816]: DEBUG nova.network.neutron [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Updated VIF entry in instance network info cache for port bea401ef-e47d-48d5-9f02-5b82f1830a7e. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1725.746672] env[62816]: DEBUG nova.network.neutron [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Updating instance_info_cache with network_info: [{"id": "bea401ef-e47d-48d5-9f02-5b82f1830a7e", "address": "fa:16:3e:f0:b6:5d", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbea401ef-e4", "ovs_interfaceid": "bea401ef-e47d-48d5-9f02-5b82f1830a7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.914176] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b26e06-4c97-409e-b823-437b79109ac5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.925621] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5731853d-beab-4101-888b-55a2dc8def80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.959856] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f9a500f-eb34-4f65-ab14-1e2967e722d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.967904] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9353f2d-3dd5-481b-9555-7a7b21b37d52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.984306] env[62816]: DEBUG nova.compute.provider_tree [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.068318] env[62816]: DEBUG oslo_vmware.api [None 
req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521935b3-e483-3d52-62dd-81be9ec34c6d, 'name': SearchDatastore_Task, 'duration_secs': 0.025312} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.069085] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c27bcd9f-7c54-482b-9b17-12050218df7b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.074175] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1726.074175] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524e7a20-ab32-f558-55b5-fec3538beac6" [ 1726.074175] env[62816]: _type = "Task" [ 1726.074175] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.081822] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524e7a20-ab32-f558-55b5-fec3538beac6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.126846] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788849, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518883} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.127097] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1726.127319] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1726.127812] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bbf4a86-b1a3-44a9-81f7-94def9cd4de7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.134222] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1726.134222] env[62816]: value = "task-1788850" [ 1726.134222] env[62816]: _type = "Task" [ 1726.134222] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.141840] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.154778] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1726.250843] env[62816]: DEBUG oslo_concurrency.lockutils [req-15464909-178b-46dc-bcee-f25008895cbd req-f14c68b6-ed61-40cc-9285-4b4580b82b30 service nova] Releasing lock "refresh_cache-915127f6-2da7-4eab-a7cb-331a41d04d0e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.280684] env[62816]: DEBUG nova.network.neutron [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Updating instance_info_cache with network_info: [{"id": "c7e34c23-d9bb-4fb9-b31d-4ac748f7c396", "address": "fa:16:3e:a9:94:73", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7e34c23-d9", "ovs_interfaceid": "c7e34c23-d9bb-4fb9-b31d-4ac748f7c396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.337632] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "9ab4e631-5b31-4b37-9b49-4f0423286752" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.338473] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.338702] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "9ab4e631-5b31-4b37-9b49-4f0423286752-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.338902] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 
tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.339117] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.341061] env[62816]: INFO nova.compute.manager [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Terminating instance [ 1726.342700] env[62816]: DEBUG nova.compute.manager [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1726.342896] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1726.343712] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8ed214-80c7-4277-ba25-67cbb7a14e15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.351023] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1726.351245] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a5e006f-98a8-46b1-ade9-bf6b63a25c31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.358098] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1726.358098] env[62816]: value = "task-1788851" [ 1726.358098] env[62816]: _type = "Task" [ 1726.358098] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.368441] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788851, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.407125] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "2583e2ba-8904-420c-a417-d6af71bfa9ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.407400] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.407600] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "2583e2ba-8904-420c-a417-d6af71bfa9ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.407777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.407947] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.410212] env[62816]: INFO nova.compute.manager [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Terminating instance [ 1726.411996] env[62816]: DEBUG nova.compute.manager [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1726.412207] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1726.413093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4297922b-10b0-46da-b814-36625d2b0c2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.421053] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1726.421304] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-29f22dd5-434c-445c-855c-cb50706a5937 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.427647] env[62816]: DEBUG oslo_vmware.api [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1726.427647] env[62816]: value = "task-1788852" [ 1726.427647] env[62816]: _type = "Task" [ 1726.427647] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.435525] env[62816]: DEBUG oslo_vmware.api [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.487337] env[62816]: DEBUG nova.scheduler.client.report [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1726.584217] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524e7a20-ab32-f558-55b5-fec3538beac6, 'name': SearchDatastore_Task, 'duration_secs': 0.031006} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.584505] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.584793] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 915127f6-2da7-4eab-a7cb-331a41d04d0e/915127f6-2da7-4eab-a7cb-331a41d04d0e.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1726.585064] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a29b46f9-1d55-41b6-9093-1fb385b884fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.591345] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1726.591345] env[62816]: value = "task-1788853" [ 1726.591345] env[62816]: _type = "Task" [ 1726.591345] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.599206] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788853, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.643794] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068578} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.644081] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1726.644881] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53c3b24-69d8-42c5-b8d9-4cc583a89c36 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.667090] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1726.667361] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ccd9c1e-89c7-4525-9f5e-9bacd08db8ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.686659] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1726.686659] env[62816]: value = "task-1788854" [ 1726.686659] env[62816]: _type = "Task" [ 1726.686659] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.694091] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788854, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.784024] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "refresh_cache-0acc334c-e400-4b28-8ee7-8d6cafb057e9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.784024] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Instance network_info: |[{"id": "c7e34c23-d9bb-4fb9-b31d-4ac748f7c396", "address": "fa:16:3e:a9:94:73", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7e34c23-d9", "ovs_interfaceid": "c7e34c23-d9bb-4fb9-b31d-4ac748f7c396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1726.784543] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:94:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c7e34c23-d9bb-4fb9-b31d-4ac748f7c396', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1726.792881] env[62816]: DEBUG oslo.service.loopingcall [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.793126] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1726.793351] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8a4dda5-29f5-4153-ae8a-64c166373edb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.813759] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1726.813759] env[62816]: value = "task-1788855" [ 1726.813759] env[62816]: _type = "Task" [ 1726.813759] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.828645] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788855, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.867837] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788851, 'name': PowerOffVM_Task, 'duration_secs': 0.253183} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.868250] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1726.868441] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1726.868701] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ecf92f9-c1c4-4b63-b86b-a2a19469ac7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.940705] env[62816]: DEBUG oslo_vmware.api [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788852, 'name': PowerOffVM_Task, 'duration_secs': 0.250878} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.941213] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1726.941213] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1726.941399] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-192cefec-6b4d-4771-ac67-79c4609cd4b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.948144] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1726.948444] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1726.948640] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleting the datastore file [datastore1] 9ab4e631-5b31-4b37-9b49-4f0423286752 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1726.948890] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43c7d63c-9944-4ecb-91d2-358cad8c7692 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.955186] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1726.955186] env[62816]: value = "task-1788858" [ 1726.955186] env[62816]: _type = "Task" [ 1726.955186] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.962894] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788858, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.992990] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.993592] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1726.996150] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.669s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.996347] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.996938] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1726.997749] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb43e05-5eac-4614-81a4-835149a5872b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.007139] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3447f3-8cd3-4cd8-937c-aedf3a3cd3f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.017882] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1727.018281] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1727.018563] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleting the datastore file [datastore1] 
2583e2ba-8904-420c-a417-d6af71bfa9ac {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1727.027493] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ec5f5d6d-9076-4eb5-bf5f-a22c336b16a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.031273] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7ef0e0-70f8-4922-8295-7cccbdebf6e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.041184] env[62816]: DEBUG oslo_vmware.api [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for the task: (returnval){ [ 1727.041184] env[62816]: value = "task-1788859" [ 1727.041184] env[62816]: _type = "Task" [ 1727.041184] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.042946] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302846e1-6a88-46e5-bccc-e78eca58d6ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.051286] env[62816]: DEBUG nova.compute.manager [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Received event network-changed-c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.051492] env[62816]: DEBUG nova.compute.manager [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Refreshing instance network info cache due to event network-changed-c7e34c23-d9bb-4fb9-b31d-4ac748f7c396. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1727.051709] env[62816]: DEBUG oslo_concurrency.lockutils [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] Acquiring lock "refresh_cache-0acc334c-e400-4b28-8ee7-8d6cafb057e9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.051851] env[62816]: DEBUG oslo_concurrency.lockutils [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] Acquired lock "refresh_cache-0acc334c-e400-4b28-8ee7-8d6cafb057e9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.052026] env[62816]: DEBUG nova.network.neutron [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Refreshing network info cache for port c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1727.084244] env[62816]: DEBUG oslo_vmware.api [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788859, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.084853] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179288MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1727.084999] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.085210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.101915] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788853, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.198147] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788854, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.326731] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788855, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.464561] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788858, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.501029] env[62816]: DEBUG nova.compute.utils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1727.501762] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1727.502140] env[62816]: DEBUG nova.network.neutron [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1727.544286] env[62816]: DEBUG nova.policy [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8b9804812a99462b8d0da47d437ca59c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5b59045366914873aef4d1ea4134fe46', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1727.556990] env[62816]: DEBUG oslo_vmware.api [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.476899} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.557296] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1727.557486] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1727.557675] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1727.557841] env[62816]: INFO nova.compute.manager [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1727.558127] env[62816]: DEBUG oslo.service.loopingcall [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.558663] env[62816]: DEBUG nova.compute.manager [-] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1727.558775] env[62816]: DEBUG nova.network.neutron [-] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1727.605768] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788853, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.823718} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.606106] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 915127f6-2da7-4eab-a7cb-331a41d04d0e/915127f6-2da7-4eab-a7cb-331a41d04d0e.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1727.606278] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1727.607112] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb2b8513-a120-47f2-9f79-5acc906c9a03 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.614643] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1727.614643] env[62816]: value = "task-1788860" [ 1727.614643] env[62816]: _type = "Task" [ 1727.614643] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.626194] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.698747] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788854, 'name': ReconfigVM_Task, 'duration_secs': 0.86228} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.699058] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfigured VM instance instance-00000050 to attach disk [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1727.699748] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0d0ce52-de71-4a63-a987-d8e20c7093ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.706425] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1727.706425] env[62816]: value = "task-1788861" [ 1727.706425] env[62816]: _type = "Task" [ 1727.706425] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.714447] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788861, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.814414] env[62816]: DEBUG nova.network.neutron [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Successfully created port: 960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1727.834045] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788855, 'name': CreateVM_Task, 'duration_secs': 0.736815} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.834232] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1727.834869] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.835050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.835365] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1727.835616] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-455d7fb9-a81d-4488-b30a-e44fd2ac7002 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.839977] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1727.839977] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5287184a-18d7-7b63-bb7f-974aac41a7aa" [ 1727.839977] env[62816]: _type = "Task" [ 1727.839977] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.847641] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5287184a-18d7-7b63-bb7f-974aac41a7aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.919076] env[62816]: DEBUG nova.network.neutron [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Updated VIF entry in instance network info cache for port c7e34c23-d9bb-4fb9-b31d-4ac748f7c396. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1727.919508] env[62816]: DEBUG nova.network.neutron [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Updating instance_info_cache with network_info: [{"id": "c7e34c23-d9bb-4fb9-b31d-4ac748f7c396", "address": "fa:16:3e:a9:94:73", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc7e34c23-d9", "ovs_interfaceid": "c7e34c23-d9bb-4fb9-b31d-4ac748f7c396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.965375] env[62816]: DEBUG oslo_vmware.api [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Task: {'id': task-1788858, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.515086} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.965681] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1727.965876] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1727.966071] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1727.966257] env[62816]: INFO nova.compute.manager [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Took 1.62 seconds to destroy the instance on the hypervisor. 
[ 1727.966507] env[62816]: DEBUG oslo.service.loopingcall [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.966708] env[62816]: DEBUG nova.compute.manager [-] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1727.966827] env[62816]: DEBUG nova.network.neutron [-] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1728.005191] env[62816]: DEBUG nova.compute.manager [req-55e5ebda-2093-4f8a-89b7-b8a453215482 req-06d60065-9a2c-496e-a851-21ca4a0e5354 service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Received event network-vif-deleted-09a63dbd-34b4-4340-887c-035cebe037ff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1728.005427] env[62816]: INFO nova.compute.manager [req-55e5ebda-2093-4f8a-89b7-b8a453215482 req-06d60065-9a2c-496e-a851-21ca4a0e5354 service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Neutron deleted interface 09a63dbd-34b4-4340-887c-035cebe037ff; detaching it from the instance and deleting it from the info cache [ 1728.005699] env[62816]: DEBUG nova.network.neutron [req-55e5ebda-2093-4f8a-89b7-b8a453215482 req-06d60065-9a2c-496e-a851-21ca4a0e5354 service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.009789] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1728.125070] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149434} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.125363] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1728.126183] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0456d457-7fb1-4efd-87dc-4c39953250fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.148139] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 915127f6-2da7-4eab-a7cb-331a41d04d0e/915127f6-2da7-4eab-a7cb-331a41d04d0e.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1728.148463] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-117b9ca0-1b31-4e1b-a194-ac76fce06518 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.168591] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1728.168591] env[62816]: value = "task-1788862" [ 1728.168591] env[62816]: _type = "Task" [ 1728.168591] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.179038] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788862, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.216739] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788861, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.219441] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0dbf907f-0313-435c-a8be-19f7e48ded76 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.219581] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f9d9593a-1c25-47a1-98fd-4462a851f134 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.219755] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance a01e772c-dafe-4091-bae6-f9f59d5c972d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.219823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 31ac8296-14fa-46f7-b825-c31904b832d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.219939] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 8105e650-8482-40c6-bd7a-b8daea19a0d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.220068] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9745413b-2bd8-45d7-8491-483e4921b59c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.220306] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance dd833e38-691c-4757-9c6b-659c74343d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.220544] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c66fa160-d4dd-429f-8751-f36cb2020ff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.220689] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 543d69d2-0694-4d57-bbae-f8851ff0f0dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.220816] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance d03ed540-5c20-4bcb-ac7e-eec8c09e4451 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.220938] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4ab07a21-2685-42bc-af13-b95473993d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221082] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9ab4e631-5b31-4b37-9b49-4f0423286752 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221273] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 2583e2ba-8904-420c-a417-d6af71bfa9ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221403] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b9e8af08-9579-4dbf-8ea1-35ffab75e159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221528] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9972b167-a950-4dba-ac02-068f66300053 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221630] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f97ea34e-792e-4023-bd2f-549dba129925 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221787] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 915127f6-2da7-4eab-a7cb-331a41d04d0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221861] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0acc334c-e400-4b28-8ee7-8d6cafb057e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.221976] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 74c15238-221c-4d1c-8577-4046d5666e45 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.222243] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 19 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1728.222437] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4160MB phys_disk=200GB used_disk=19GB total_vcpus=48 used_vcpus=19 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1728.352329] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5287184a-18d7-7b63-bb7f-974aac41a7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.008959} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.352602] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.352833] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1728.353085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1728.353309] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.353496] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 
tempest-MultipleCreateTestJSON-1865147490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1728.355676] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b640c554-d3fb-444b-b3b8-591216b8a620 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.364112] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1728.364309] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1728.364996] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ae8a708-a500-4fac-b6aa-20727c61cb1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.372522] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1728.372522] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5219f81a-ba96-7550-bf24-4e7d60a07a68" [ 1728.372522] env[62816]: _type = "Task" [ 1728.372522] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.381163] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5219f81a-ba96-7550-bf24-4e7d60a07a68, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.422515] env[62816]: DEBUG oslo_concurrency.lockutils [req-4f238d49-1bfd-481f-aafd-379329eb6541 req-1de47c00-cdc7-4680-80dd-68aac2a12a9e service nova] Releasing lock "refresh_cache-0acc334c-e400-4b28-8ee7-8d6cafb057e9" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.436589] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c182ae-834d-4a13-9fd4-f5b6f0411638 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.443823] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce553ac3-0665-4187-b7af-f7e8967f6e92 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.474354] env[62816]: DEBUG nova.network.neutron [-] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.475971] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89fa312-4ff1-4739-bf7c-6416b1e3fd96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.483593] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd01be3-59ab-4c95-be01-23ad67b0dc76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.497728] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.510829] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1694092c-914d-4bdf-ab78-d0fc717ffcd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.522927] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb49f31-f7cf-4515-ba26-71c30428d3a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.559041] env[62816]: DEBUG nova.compute.manager [req-55e5ebda-2093-4f8a-89b7-b8a453215482 req-06d60065-9a2c-496e-a851-21ca4a0e5354 service nova] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Detach interface failed, port_id=09a63dbd-34b4-4340-887c-035cebe037ff, reason: Instance 2583e2ba-8904-420c-a417-d6af71bfa9ac could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1728.679019] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788862, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.693853] env[62816]: DEBUG nova.network.neutron [-] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.716434] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788861, 'name': Rename_Task, 'duration_secs': 0.937347} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.716703] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1728.716954] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06116f29-b84d-492a-b55e-8c3144f1bb16 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.723100] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1728.723100] env[62816]: value = "task-1788863" [ 1728.723100] env[62816]: _type = "Task" [ 1728.723100] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.730301] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788863, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.884054] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5219f81a-ba96-7550-bf24-4e7d60a07a68, 'name': SearchDatastore_Task, 'duration_secs': 0.008408} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.884802] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5472b0d9-0aac-496b-9c87-083a79513dfc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.889932] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1728.889932] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520743c8-39d1-aeb5-d35d-cdbf91e1369b" [ 1728.889932] env[62816]: _type = "Task" [ 1728.889932] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.897432] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520743c8-39d1-aeb5-d35d-cdbf91e1369b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.979682] env[62816]: INFO nova.compute.manager [-] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Took 1.42 seconds to deallocate network for instance. [ 1729.000239] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1729.017824] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1729.043367] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1729.043637] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1729.043795] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1729.043978] env[62816]: DEBUG nova.virt.hardware [None 
req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1729.044202] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1729.044379] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1729.044586] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1729.044747] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1729.044914] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1729.045120] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1729.045370] env[62816]: DEBUG nova.virt.hardware [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1729.046302] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-514a0129-b641-452b-8253-bc67ee32b8dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.055121] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716c7760-b7aa-4184-9838-fc063c5f7ae4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.179615] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788862, 'name': ReconfigVM_Task, 
'duration_secs': 0.61024} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.179843] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 915127f6-2da7-4eab-a7cb-331a41d04d0e/915127f6-2da7-4eab-a7cb-331a41d04d0e.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1729.180780] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0494b8f7-f6eb-44e2-9efb-37eedd1a5a10 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.186441] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1729.186441] env[62816]: value = "task-1788864" [ 1729.186441] env[62816]: _type = "Task" [ 1729.186441] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.194053] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788864, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.196559] env[62816]: INFO nova.compute.manager [-] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Took 1.23 seconds to deallocate network for instance. [ 1729.232899] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788863, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.351632] env[62816]: DEBUG nova.compute.manager [req-8fe5ebfd-9c17-465b-b072-73cd95e33b1d req-0e201437-ffe0-4f4b-9358-d345c64c0bed service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Received event network-vif-plugged-960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1729.351875] env[62816]: DEBUG oslo_concurrency.lockutils [req-8fe5ebfd-9c17-465b-b072-73cd95e33b1d req-0e201437-ffe0-4f4b-9358-d345c64c0bed service nova] Acquiring lock "74c15238-221c-4d1c-8577-4046d5666e45-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.352176] env[62816]: DEBUG oslo_concurrency.lockutils [req-8fe5ebfd-9c17-465b-b072-73cd95e33b1d req-0e201437-ffe0-4f4b-9358-d345c64c0bed service nova] Lock "74c15238-221c-4d1c-8577-4046d5666e45-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.352293] env[62816]: DEBUG oslo_concurrency.lockutils [req-8fe5ebfd-9c17-465b-b072-73cd95e33b1d req-0e201437-ffe0-4f4b-9358-d345c64c0bed service nova] Lock "74c15238-221c-4d1c-8577-4046d5666e45-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.352480] env[62816]: DEBUG nova.compute.manager [req-8fe5ebfd-9c17-465b-b072-73cd95e33b1d req-0e201437-ffe0-4f4b-9358-d345c64c0bed service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] No waiting events found dispatching network-vif-plugged-960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1729.352666] env[62816]: WARNING nova.compute.manager [req-8fe5ebfd-9c17-465b-b072-73cd95e33b1d req-0e201437-ffe0-4f4b-9358-d345c64c0bed service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Received unexpected event network-vif-plugged-960e87a7-42d1-4509-8875-6344407fc457 for instance with vm_state building and task_state spawning. [ 1729.400615] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520743c8-39d1-aeb5-d35d-cdbf91e1369b, 'name': SearchDatastore_Task, 'duration_secs': 0.047397} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.400965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.401174] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0acc334c-e400-4b28-8ee7-8d6cafb057e9/0acc334c-e400-4b28-8ee7-8d6cafb057e9.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1729.401448] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c246ee1-83b9-4fe6-9baa-1208704e566a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.408217] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1729.408217] env[62816]: value = "task-1788865" [ 1729.408217] env[62816]: _type = "Task" [ 1729.408217] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.415803] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788865, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.453303] env[62816]: DEBUG nova.network.neutron [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Successfully updated port: 960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1729.486835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.505263] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1729.505491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.420s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.505792] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.019s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.506136] env[62816]: DEBUG nova.objects.instance [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lazy-loading 'resources' on Instance uuid 2583e2ba-8904-420c-a417-d6af71bfa9ac {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1729.697529] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788864, 'name': Rename_Task, 'duration_secs': 0.237623} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.697899] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1729.698230] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75ce7140-5c34-4710-8af0-2be3095cff41 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.705725] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.706163] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1729.706163] env[62816]: value = "task-1788866" [ 1729.706163] env[62816]: _type = "Task" [ 1729.706163] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.717454] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788866, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.734261] env[62816]: DEBUG oslo_vmware.api [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788863, 'name': PowerOnVM_Task, 'duration_secs': 0.821077} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.734555] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1729.734799] env[62816]: INFO nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Took 9.75 seconds to spawn the instance on the hypervisor. 
[ 1729.734986] env[62816]: DEBUG nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1729.735918] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976a2f6e-212a-4110-a4d0-3f2286a78271 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.919346] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.466836} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.919669] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0acc334c-e400-4b28-8ee7-8d6cafb057e9/0acc334c-e400-4b28-8ee7-8d6cafb057e9.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1729.919878] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1729.920172] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0a5f16b-3707-4faa-8005-8229feb2ef8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.927537] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1729.927537] env[62816]: value = "task-1788867" [ 1729.927537] env[62816]: _type = "Task" [ 1729.927537] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.935185] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788867, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.954119] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "refresh_cache-74c15238-221c-4d1c-8577-4046d5666e45" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1729.954278] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquired lock "refresh_cache-74c15238-221c-4d1c-8577-4046d5666e45" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1729.954436] env[62816]: DEBUG nova.network.neutron [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1730.034951] env[62816]: DEBUG nova.compute.manager [req-99f15b6c-9304-4db6-8719-b77ca8d4ef03 req-828d669e-6be0-4a06-bba9-344d0d408f23 service nova] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Received event network-vif-deleted-07bf7c71-9e13-40e5-b267-965611379c35 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1730.219255] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788866, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.245700] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c1ef8a-b0c2-45f2-8633-cc3aa5a544f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.257219] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8adfff6d-33e0-49b9-9da6-c2773921d10d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.260949] env[62816]: INFO nova.compute.manager [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Took 14.67 seconds to build instance. 
[ 1730.290051] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcf690a-1e35-4348-90ee-cf85b4749716 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.298470] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a08532-fc5b-41de-b273-daa296678a3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.314192] env[62816]: DEBUG nova.compute.provider_tree [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1730.438951] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788867, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067559} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.439229] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1730.439999] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516989d1-e0ca-40a2-b403-e49cd62775cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.467519] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 0acc334c-e400-4b28-8ee7-8d6cafb057e9/0acc334c-e400-4b28-8ee7-8d6cafb057e9.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1730.467863] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-103fa684-be23-4d5b-a6ed-d72c33a3d1d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.487836] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1730.487836] env[62816]: value = "task-1788868" [ 1730.487836] env[62816]: _type = "Task" [ 1730.487836] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.496358] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788868, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.513457] env[62816]: DEBUG nova.network.neutron [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1730.657812] env[62816]: DEBUG nova.network.neutron [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Updating instance_info_cache with network_info: [{"id": "960e87a7-42d1-4509-8875-6344407fc457", "address": "fa:16:3e:0f:96:6a", "network": {"id": "41701914-7428-47f2-8fc0-00cec0155d84", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-550490719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b59045366914873aef4d1ea4134fe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap960e87a7-42", "ovs_interfaceid": "960e87a7-42d1-4509-8875-6344407fc457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1730.717628] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788866, 'name': PowerOnVM_Task, 'duration_secs': 0.792285} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.717895] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1730.718180] env[62816]: INFO nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Took 8.30 seconds to spawn the instance on the hypervisor. 
[ 1730.718348] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1730.719195] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49279652-0ce9-4ca8-b873-29478f07a2ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.762833] env[62816]: DEBUG oslo_concurrency.lockutils [None req-75743094-37a3-4dca-b0e6-3e4dd0cc8483 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.189s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.818653] env[62816]: DEBUG nova.scheduler.client.report [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1730.998386] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788868, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.161704] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Releasing lock "refresh_cache-74c15238-221c-4d1c-8577-4046d5666e45" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.162073] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Instance network_info: |[{"id": "960e87a7-42d1-4509-8875-6344407fc457", "address": "fa:16:3e:0f:96:6a", "network": {"id": "41701914-7428-47f2-8fc0-00cec0155d84", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-550490719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b59045366914873aef4d1ea4134fe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap960e87a7-42", "ovs_interfaceid": "960e87a7-42d1-4509-8875-6344407fc457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1731.162523] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:96:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8145bd31-c4a7-4828-8818-d065010c9565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '960e87a7-42d1-4509-8875-6344407fc457', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1731.170297] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Creating folder: Project (5b59045366914873aef4d1ea4134fe46). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1731.171046] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e113da9-7b03-469f-957f-105d3efe85b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.180753] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Created folder: Project (5b59045366914873aef4d1ea4134fe46) in parent group-v370905. [ 1731.180984] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Creating folder: Instances. Parent ref: group-v371138. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1731.181285] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50c84ced-2d83-4d28-aa69-d8e57274a22b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.190014] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Created folder: Instances in parent group-v371138. [ 1731.190341] env[62816]: DEBUG oslo.service.loopingcall [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1731.190601] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1731.190871] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4a1613c4-66ce-4f08-b7f8-93ade65280f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.211361] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1731.211361] env[62816]: value = "task-1788871" [ 1731.211361] env[62816]: _type = "Task" [ 1731.211361] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.218889] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788871, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.234430] env[62816]: INFO nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Took 14.98 seconds to build instance. 
[ 1731.323843] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.818s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.327017] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.621s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.327620] env[62816]: DEBUG nova.objects.instance [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lazy-loading 'resources' on Instance uuid 9ab4e631-5b31-4b37-9b49-4f0423286752 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1731.350505] env[62816]: INFO nova.scheduler.client.report [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleted allocations for instance 2583e2ba-8904-420c-a417-d6af71bfa9ac [ 1731.383442] env[62816]: DEBUG nova.compute.manager [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Received event network-changed-960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1731.383673] env[62816]: DEBUG nova.compute.manager [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Refreshing instance network info cache due to event network-changed-960e87a7-42d1-4509-8875-6344407fc457. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1731.383894] env[62816]: DEBUG oslo_concurrency.lockutils [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] Acquiring lock "refresh_cache-74c15238-221c-4d1c-8577-4046d5666e45" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.384051] env[62816]: DEBUG oslo_concurrency.lockutils [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] Acquired lock "refresh_cache-74c15238-221c-4d1c-8577-4046d5666e45" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.384217] env[62816]: DEBUG nova.network.neutron [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Refreshing network info cache for port 960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1731.498657] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788868, 'name': ReconfigVM_Task, 'duration_secs': 0.952295} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.498954] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 0acc334c-e400-4b28-8ee7-8d6cafb057e9/0acc334c-e400-4b28-8ee7-8d6cafb057e9.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1731.499718] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c91cf815-67df-4f98-bb82-1167ace99cbc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.506630] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1731.506630] env[62816]: value = "task-1788872" [ 1731.506630] env[62816]: _type = "Task" [ 1731.506630] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.514841] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788872, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.721265] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788871, 'name': CreateVM_Task, 'duration_secs': 0.38208} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.721443] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1731.722134] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.722309] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.722621] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1731.722904] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4116159f-8218-4951-a5bc-3de0086cde1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.727356] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1731.727356] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527d08c9-881c-0da6-f5b4-48005e8d208f" [ 1731.727356] env[62816]: _type = "Task" [ 1731.727356] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.734799] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527d08c9-881c-0da6-f5b4-48005e8d208f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.736303] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.496s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.857452] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4fc80d4e-5968-4525-af9b-8dab357cf0f8 tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "2583e2ba-8904-420c-a417-d6af71bfa9ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.450s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.015490] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788872, 'name': Rename_Task, 'duration_secs': 0.128928} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.017766] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1732.018212] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-05c37091-f2f9-4d6a-b931-53c90d6e2c57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.025673] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1732.025673] env[62816]: value = "task-1788873" [ 1732.025673] env[62816]: _type = "Task" [ 1732.025673] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.035569] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788873, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.036952] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f53e2b53-ae08-4d20-9c7d-0b04344d34c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.043328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3400b8-b859-48a2-9cab-a7a4f0dd96bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.077487] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139c92c0-cdaa-42c6-9bd5-0f7f7f2b7777 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.084842] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e63ae5-d824-41f0-9f18-cdff744faf20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.098355] env[62816]: DEBUG nova.compute.provider_tree [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1732.114038] env[62816]: DEBUG nova.network.neutron [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Updated VIF entry in instance network info cache for port 960e87a7-42d1-4509-8875-6344407fc457. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1732.114328] env[62816]: DEBUG nova.network.neutron [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Updating instance_info_cache with network_info: [{"id": "960e87a7-42d1-4509-8875-6344407fc457", "address": "fa:16:3e:0f:96:6a", "network": {"id": "41701914-7428-47f2-8fc0-00cec0155d84", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-550490719-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5b59045366914873aef4d1ea4134fe46", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap960e87a7-42", "ovs_interfaceid": "960e87a7-42d1-4509-8875-6344407fc457", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.245159] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527d08c9-881c-0da6-f5b4-48005e8d208f, 'name': SearchDatastore_Task, 'duration_secs': 0.009904} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.245596] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.245798] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1732.246090] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.246512] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.246744] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1732.247103] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d0d3cdd-bac3-4be6-a0e9-19c58bd6673b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.257454] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1732.257835] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1732.258594] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7365ee50-4ed3-451e-94eb-a45f7890b833 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.266022] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1732.266022] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e4854f-ee65-edc2-92d5-c49b1354ea42" [ 1732.266022] env[62816]: _type = "Task" [ 1732.266022] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.275614] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e4854f-ee65-edc2-92d5-c49b1354ea42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.537374] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788873, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.601699] env[62816]: DEBUG nova.scheduler.client.report [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1732.617068] env[62816]: DEBUG oslo_concurrency.lockutils [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] Releasing lock "refresh_cache-74c15238-221c-4d1c-8577-4046d5666e45" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.617424] env[62816]: DEBUG nova.compute.manager [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.617645] env[62816]: DEBUG nova.compute.manager [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing instance network info cache due to event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1732.617874] env[62816]: DEBUG oslo_concurrency.lockutils [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1732.618024] env[62816]: DEBUG oslo_concurrency.lockutils [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1732.618225] env[62816]: DEBUG nova.network.neutron [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1732.776448] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e4854f-ee65-edc2-92d5-c49b1354ea42, 'name': SearchDatastore_Task, 'duration_secs': 0.012328} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.777225] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b64a59fa-0f91-44fd-849a-3e13e288175c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.782941] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1732.782941] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5227bf2d-2eb0-f9ee-0ac3-7c57e409409b" [ 1732.782941] env[62816]: _type = "Task" [ 1732.782941] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.793073] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5227bf2d-2eb0-f9ee-0ac3-7c57e409409b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.037300] env[62816]: DEBUG oslo_vmware.api [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788873, 'name': PowerOnVM_Task, 'duration_secs': 0.691195} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.037537] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1733.037744] env[62816]: INFO nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Took 8.21 seconds to spawn the instance on the hypervisor. [ 1733.037926] env[62816]: DEBUG nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1733.038734] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dca9a2-8fe8-4309-824b-4c2f3a180fa6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.106909] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.132578] env[62816]: INFO nova.scheduler.client.report [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Deleted allocations for instance 9ab4e631-5b31-4b37-9b49-4f0423286752 [ 1733.294025] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5227bf2d-2eb0-f9ee-0ac3-7c57e409409b, 'name': SearchDatastore_Task, 'duration_secs': 0.012405} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.294025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.294363] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 74c15238-221c-4d1c-8577-4046d5666e45/74c15238-221c-4d1c-8577-4046d5666e45.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1733.294485] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f39d4ce3-f2fd-4be1-993d-b35bd6026502 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.301554] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1733.301554] env[62816]: value = "task-1788874" [ 1733.301554] env[62816]: _type = "Task" [ 1733.301554] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.310054] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788874, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.352530] env[62816]: DEBUG nova.network.neutron [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updated VIF entry in instance network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1733.352925] env[62816]: DEBUG nova.network.neutron [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.555352] env[62816]: INFO nova.compute.manager [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Took 17.27 seconds to build instance. [ 1733.642445] env[62816]: DEBUG oslo_concurrency.lockutils [None req-29016349-64a0-4645-918b-f15f53b297da tempest-ListServersNegativeTestJSON-620346658 tempest-ListServersNegativeTestJSON-620346658-project-member] Lock "9ab4e631-5b31-4b37-9b49-4f0423286752" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.304s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.814816] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788874, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.857521] env[62816]: DEBUG oslo_concurrency.lockutils [req-f49c81a7-98e5-4518-9a65-55d63fba803c req-d8c545be-c4ac-4635-a7e3-1689a9949fc0 service nova] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.058109] env[62816]: DEBUG oslo_concurrency.lockutils [None req-51016bbc-32cc-4923-948d-88bf4fe4ea68 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.778s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.312807] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788874, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.846607} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.317095] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 74c15238-221c-4d1c-8577-4046d5666e45/74c15238-221c-4d1c-8577-4046d5666e45.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1734.317095] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1734.317095] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c223cc18-cbcc-4b94-819b-6626d495d35e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.317095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "915127f6-2da7-4eab-a7cb-331a41d04d0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.317095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.317095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 
tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "915127f6-2da7-4eab-a7cb-331a41d04d0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.317095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.317848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.321064] env[62816]: INFO nova.compute.manager [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Terminating instance [ 1734.323096] env[62816]: DEBUG nova.compute.manager [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1734.323467] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1734.324422] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8247f5-962c-495d-b866-bd455ba7eaa9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.328743] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1734.328743] env[62816]: value = "task-1788875" [ 1734.328743] env[62816]: _type = "Task" [ 1734.328743] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.334865] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1734.335677] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fe3288a1-8057-4d1f-b197-3609041f2e1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.342734] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788875, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.347410] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1734.347410] env[62816]: value = "task-1788876" [ 1734.347410] env[62816]: _type = "Task" [ 1734.347410] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.357887] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788876, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.419623] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.419923] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.420159] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.420350] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.420552] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.422893] env[62816]: INFO nova.compute.manager [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Terminating instance [ 1734.424854] env[62816]: DEBUG nova.compute.manager [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1734.425210] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1734.426079] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d1d534-059f-40c5-ab20-f699723e07e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.433977] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1734.434295] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7420afc6-8771-481e-b6c3-94ee784932ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.440752] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1734.440752] env[62816]: value = "task-1788877" [ 1734.440752] env[62816]: _type = "Task" [ 1734.440752] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.450297] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788877, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.838722] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142337} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.839182] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1734.840051] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64a2648-6b46-4833-8780-d4bad0d186e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.864319] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 74c15238-221c-4d1c-8577-4046d5666e45/74c15238-221c-4d1c-8577-4046d5666e45.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1734.868069] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7590f019-7f10-44c3-9f69-e461d147c81b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.886015] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788876, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.887260] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1734.887260] env[62816]: value = "task-1788878" [ 1734.887260] env[62816]: _type = "Task" [ 1734.887260] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.896802] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788878, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.949942] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788877, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.364710] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788876, 'name': PowerOffVM_Task, 'duration_secs': 0.827337} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.365052] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1735.365241] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1735.365507] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e1dcb8-3ab4-48c1-95b4-b5c8a0d2d507 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.398408] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788878, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.450560] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788877, 'name': PowerOffVM_Task, 'duration_secs': 0.736912} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.450841] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1735.451021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1735.451272] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85c7500c-f155-4b23-95ed-2e9cdeeb7060 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.738732] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1735.739159] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1735.739496] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleting the datastore file [datastore1] 0acc334c-e400-4b28-8ee7-8d6cafb057e9 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1735.739866] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44ee9229-85af-4989-8996-2927c2453e18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.747807] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1735.747807] env[62816]: value = "task-1788881" [ 1735.747807] env[62816]: _type = "Task" [ 1735.747807] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.751857] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1735.752089] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1735.752271] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleting the datastore file [datastore1] 915127f6-2da7-4eab-a7cb-331a41d04d0e {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1735.752833] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e61cd3b-16f0-4351-b804-559b947120d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.757662] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788881, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.762105] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1735.762105] env[62816]: value = "task-1788882" [ 1735.762105] env[62816]: _type = "Task" [ 1735.762105] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.769348] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788882, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.898516] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788878, 'name': ReconfigVM_Task, 'duration_secs': 0.611645} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.898811] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 74c15238-221c-4d1c-8577-4046d5666e45/74c15238-221c-4d1c-8577-4046d5666e45.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1735.899476] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1d3db8c-fc1e-47db-9f6a-6810538df6a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.907592] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1735.907592] env[62816]: value = "task-1788883" [ 1735.907592] env[62816]: _type = "Task" [ 1735.907592] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.920976] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788883, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.258068] env[62816]: DEBUG oslo_vmware.api [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788881, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148994} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.258402] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1736.258604] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1736.258782] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1736.258953] env[62816]: INFO nova.compute.manager [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1736.259312] env[62816]: DEBUG oslo.service.loopingcall [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.259410] env[62816]: DEBUG nova.compute.manager [-] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1736.259492] env[62816]: DEBUG nova.network.neutron [-] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1736.271550] env[62816]: DEBUG oslo_vmware.api [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144214} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.271794] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1736.271962] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1736.272146] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1736.272315] env[62816]: INFO nova.compute.manager [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Took 1.95 seconds to destroy the instance on the hypervisor. [ 1736.272539] env[62816]: DEBUG oslo.service.loopingcall [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.272721] env[62816]: DEBUG nova.compute.manager [-] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1736.272810] env[62816]: DEBUG nova.network.neutron [-] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1736.418636] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788883, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.565812] env[62816]: DEBUG nova.compute.manager [req-3110ca3a-4aaf-4c08-bc3c-c0c7a5caff33 req-23771e30-3839-490b-b108-af61741660d6 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Received event network-vif-deleted-c7e34c23-d9bb-4fb9-b31d-4ac748f7c396 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.566035] env[62816]: INFO nova.compute.manager [req-3110ca3a-4aaf-4c08-bc3c-c0c7a5caff33 req-23771e30-3839-490b-b108-af61741660d6 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Neutron deleted interface c7e34c23-d9bb-4fb9-b31d-4ac748f7c396; detaching it from the instance and deleting it from the info cache [ 1736.566223] env[62816]: DEBUG nova.network.neutron [req-3110ca3a-4aaf-4c08-bc3c-c0c7a5caff33 req-23771e30-3839-490b-b108-af61741660d6 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.586027] env[62816]: DEBUG nova.compute.manager [req-9f0eeb75-5321-40da-a206-0639a3a1273a req-4596a4fa-7866-4502-a04c-2e9abb0185aa service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Received event network-vif-deleted-bea401ef-e47d-48d5-9f02-5b82f1830a7e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.586027] env[62816]: INFO nova.compute.manager [req-9f0eeb75-5321-40da-a206-0639a3a1273a req-4596a4fa-7866-4502-a04c-2e9abb0185aa service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Neutron deleted interface bea401ef-e47d-48d5-9f02-5b82f1830a7e; detaching it from the instance and deleting it from the info cache [ 1736.586027] env[62816]: DEBUG nova.network.neutron [req-9f0eeb75-5321-40da-a206-0639a3a1273a req-4596a4fa-7866-4502-a04c-2e9abb0185aa service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.918985] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788883, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.037822] env[62816]: DEBUG nova.network.neutron [-] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.067785] env[62816]: DEBUG nova.network.neutron [-] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.069665] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6804f1f-8457-4a64-92ab-76ea5073d1b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.079808] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7937a410-c57e-4856-a6a0-56afe889b30e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.090433] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c22c035-7dd7-47ae-96ba-50fbff232fb0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.100575] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868f00c0-a641-47f8-a954-d6e86d1ba669 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.123205] env[62816]: DEBUG nova.compute.manager [req-3110ca3a-4aaf-4c08-bc3c-c0c7a5caff33 req-23771e30-3839-490b-b108-af61741660d6 service nova] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Detach interface failed, port_id=c7e34c23-d9bb-4fb9-b31d-4ac748f7c396, reason: Instance 0acc334c-e400-4b28-8ee7-8d6cafb057e9 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1737.145106] env[62816]: DEBUG nova.compute.manager [req-9f0eeb75-5321-40da-a206-0639a3a1273a req-4596a4fa-7866-4502-a04c-2e9abb0185aa service nova] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Detach interface failed, port_id=bea401ef-e47d-48d5-9f02-5b82f1830a7e, reason: Instance 915127f6-2da7-4eab-a7cb-331a41d04d0e could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1737.420513] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788883, 'name': Rename_Task, 'duration_secs': 1.171918} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.421086] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1737.421086] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3c494b97-3af6-4867-8800-5b87771b1213 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.429408] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1737.429408] env[62816]: value = "task-1788884" [ 1737.429408] env[62816]: _type = "Task" [ 1737.429408] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.438446] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.541343] env[62816]: INFO nova.compute.manager [-] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Took 1.28 seconds to deallocate network for instance. [ 1737.570200] env[62816]: INFO nova.compute.manager [-] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Took 1.30 seconds to deallocate network for instance. [ 1737.938810] env[62816]: DEBUG oslo_vmware.api [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788884, 'name': PowerOnVM_Task, 'duration_secs': 0.496797} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.939105] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1737.939297] env[62816]: INFO nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Took 8.92 seconds to spawn the instance on the hypervisor. 
[ 1737.939512] env[62816]: DEBUG nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1737.940301] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbbe79e-3349-4325-880d-3b4cb188a4ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.049576] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.049885] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.050136] env[62816]: DEBUG nova.objects.instance [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lazy-loading 'resources' on Instance uuid 0acc334c-e400-4b28-8ee7-8d6cafb057e9 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1738.078806] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.458590] env[62816]: INFO nova.compute.manager [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Took 17.37 seconds to build instance. 
[ 1738.750023] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe30ffc-7f27-4fd8-8713-7aee822e69b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.762194] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d82eaf8-faf4-47b9-8fdb-d2c6e8aa24e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.794402] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5998a7a-fc19-4b10-bf75-4f837024398f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.801761] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab2f1cb-3f07-4ead-bac8-f6bae57fe14f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.814888] env[62816]: DEBUG nova.compute.provider_tree [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1738.961144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-65fb9d83-d18a-487f-b3c4-66d05c85b3c2 tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "74c15238-221c-4d1c-8577-4046d5666e45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.879s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.258248] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "74c15238-221c-4d1c-8577-4046d5666e45" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.258570] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "74c15238-221c-4d1c-8577-4046d5666e45" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.258792] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "74c15238-221c-4d1c-8577-4046d5666e45-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.258979] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 
tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "74c15238-221c-4d1c-8577-4046d5666e45-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.259171] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "74c15238-221c-4d1c-8577-4046d5666e45-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.261225] env[62816]: INFO nova.compute.manager [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Terminating instance [ 1739.263608] env[62816]: DEBUG nova.compute.manager [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1739.263805] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1739.264704] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af78a42-2c70-4365-b612-5a3cbf58c1dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.272679] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1739.272909] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e74eef3b-9c73-4ff7-8d42-3b2ef83acade {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.278882] env[62816]: DEBUG oslo_vmware.api [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1739.278882] env[62816]: value = "task-1788885" [ 1739.278882] env[62816]: _type = "Task" [ 1739.278882] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.286558] env[62816]: DEBUG oslo_vmware.api [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788885, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.318392] env[62816]: DEBUG nova.scheduler.client.report [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1739.789619] env[62816]: DEBUG oslo_vmware.api [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788885, 'name': PowerOffVM_Task, 'duration_secs': 0.167545} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.789619] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1739.789619] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1739.790018] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4a005bf-0e9b-48a2-b1b0-9548de5b9cd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.823670] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.774s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.825866] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.747s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.826141] env[62816]: DEBUG nova.objects.instance [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lazy-loading 'resources' on Instance uuid 915127f6-2da7-4eab-a7cb-331a41d04d0e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.843098] env[62816]: INFO nova.scheduler.client.report [None 
req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted allocations for instance 0acc334c-e400-4b28-8ee7-8d6cafb057e9 [ 1739.945190] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1739.945423] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1739.945609] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Deleting the datastore file [datastore1] 74c15238-221c-4d1c-8577-4046d5666e45 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1739.945899] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e947e41a-f929-4614-b2b2-c968c6482730 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.952988] env[62816]: DEBUG oslo_vmware.api [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for the task: (returnval){ [ 1739.952988] env[62816]: value = "task-1788887" [ 1739.952988] env[62816]: _type = "Task" [ 1739.952988] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.962133] env[62816]: DEBUG oslo_vmware.api [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788887, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.350201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ce0802c-668d-4e01-a86b-0979ab6ca74d tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "0acc334c-e400-4b28-8ee7-8d6cafb057e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.930s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.462916] env[62816]: DEBUG oslo_vmware.api [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Task: {'id': task-1788887, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154309} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.463337] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1740.463337] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1740.463337] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1740.463501] env[62816]: INFO nova.compute.manager [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1740.463862] env[62816]: DEBUG oslo.service.loopingcall [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1740.464044] env[62816]: DEBUG nova.compute.manager [-] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1740.464100] env[62816]: DEBUG nova.network.neutron [-] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1740.524129] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca93a722-fa91-4320-b994-de2255a015f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.531559] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4370d5ae-eed0-4650-aa54-452dce71fcfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.561863] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28abb3b1-6840-4903-8d64-6e6caac7e7b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.569271] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc132e8-d5d0-4e9c-8c01-a1809089eda4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.582680] env[62816]: DEBUG nova.compute.provider_tree [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1740.700638] env[62816]: DEBUG nova.compute.manager [req-885ad91b-4b42-4166-b3c4-dc78d8725e77 req-cf425848-a7a4-4be2-9aca-5a71326402a3 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Received event network-vif-deleted-960e87a7-42d1-4509-8875-6344407fc457 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1740.700902] env[62816]: INFO nova.compute.manager [req-885ad91b-4b42-4166-b3c4-dc78d8725e77 req-cf425848-a7a4-4be2-9aca-5a71326402a3 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Neutron deleted interface 960e87a7-42d1-4509-8875-6344407fc457; detaching it from the instance and deleting it from the info cache [ 1740.701104] env[62816]: DEBUG nova.network.neutron [req-885ad91b-4b42-4166-b3c4-dc78d8725e77 req-cf425848-a7a4-4be2-9aca-5a71326402a3 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.085485] env[62816]: DEBUG nova.scheduler.client.report [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1741.179800] env[62816]: DEBUG nova.network.neutron [-] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.203795] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd11616d-4f82-4b86-88b1-d76d7e0181cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.214282] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93c1a29-4bb2-429e-8d49-d7079a45d0d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.246482] env[62816]: DEBUG nova.compute.manager [req-885ad91b-4b42-4166-b3c4-dc78d8725e77 req-cf425848-a7a4-4be2-9aca-5a71326402a3 service nova] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Detach interface failed, port_id=960e87a7-42d1-4509-8875-6344407fc457, reason: Instance 74c15238-221c-4d1c-8577-4046d5666e45 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1741.590343] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.613916] env[62816]: INFO nova.scheduler.client.report [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted allocations for instance 915127f6-2da7-4eab-a7cb-331a41d04d0e [ 1741.682802] env[62816]: INFO nova.compute.manager [-] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Took 1.22 seconds to deallocate network for instance. 
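The scheduler report client entries above compare the compute node's current inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa against the cached provider tree and skip the placement update when nothing has changed. The sketch below reproduces that comparison using the inventory shape taken from the log; the cache variable and helper name are illustrative, not Nova's internals.

    # Hedged sketch of the "Inventory has not changed" check. The inventory
    # dict mirrors the values logged by nova.scheduler.client.report; the
    # cache and helper are illustrative stand-ins.
    PROVIDER_UUID = '27f49c85-1bb9-4d17-a914-e2f45a5e84fa'

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    _cached = {PROVIDER_UUID: dict(inventory)}   # stand-in for the provider tree cache

    def inventory_changed(provider_uuid, new_inventory):
        # Only push an update to placement when the view actually differs;
        # otherwise log and skip, as in the entries above.
        return _cached.get(provider_uuid) != new_inventory

    if not inventory_changed(PROVIDER_UUID, inventory):
        print('Inventory has not changed for provider %s' % PROVIDER_UUID)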
[ 1742.121312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cf7f80ca-5736-4243-9016-dc4ee52a5190 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "915127f6-2da7-4eab-a7cb-331a41d04d0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.805s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.188907] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.189249] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.189496] env[62816]: DEBUG nova.objects.instance [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lazy-loading 'resources' on Instance uuid 74c15238-221c-4d1c-8577-4046d5666e45 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.682219] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "d219e1a0-ca18-4315-9178-57953e517936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.682457] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "d219e1a0-ca18-4315-9178-57953e517936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.706389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "56bea284-2871-447b-9bb0-2f57c3053dc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.706615] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.882564] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7834b05d-121f-4747-88a6-563570937be0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.889790] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5c92cbd-dfcb-4d21-acf2-e985449b2858 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.919203] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab7a0fb-eed9-40eb-97d8-0f9c3d7a88d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.926235] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0129d263-f3d0-4592-859a-50aaf2f6a321 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.938968] env[62816]: DEBUG nova.compute.provider_tree [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1743.185137] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1743.208757] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1743.443848] env[62816]: DEBUG nova.scheduler.client.report [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1743.704889] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.726952] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.947761] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.758s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.950141] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.245s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.951718] env[62816]: INFO nova.compute.claims [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1743.964907] env[62816]: INFO nova.scheduler.client.report [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Deleted allocations for instance 74c15238-221c-4d1c-8577-4046d5666e45 [ 1744.471136] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4756fb13-b094-421e-b138-8771b34f569a tempest-InstanceActionsV221TestJSON-987158611 tempest-InstanceActionsV221TestJSON-987158611-project-member] Lock "74c15238-221c-4d1c-8577-4046d5666e45" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.212s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.189221] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d13b1ac-6815-4e28-a2f5-e4393bd01efb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.198151] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48489c5-b761-49f0-9146-514c1ca7e18f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.230806] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ab7764-bd69-4f8b-8841-5529353dcbdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.238422] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ad7787-d0cc-41d5-965f-f703af16bc5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.252461] env[62816]: DEBUG nova.compute.provider_tree [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.755597] env[62816]: DEBUG nova.scheduler.client.report [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1746.261052] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.311s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.261543] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1746.267745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.537s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.267745] env[62816]: INFO nova.compute.claims [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1746.770149] env[62816]: DEBUG nova.compute.utils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1746.773370] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1746.773530] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1746.819264] env[62816]: DEBUG nova.policy [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0fe4b6013bd4f5dac5bccdbe4683b39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f53e13df2c6740cd9666b8d60fdbfd87', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1747.056568] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Successfully created port: e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1747.274388] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1747.507125] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daec1023-d8b3-41cd-af40-47a67288b8f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.520237] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c73156-1c9e-4a4b-8161-ee8300a4374d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.550769] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ecbc31-a7b7-4bb5-9a17-f9e67d072491 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.558535] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b890ac-3867-4b3e-b64e-143c8fda229d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.571988] env[62816]: DEBUG nova.compute.provider_tree [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.078031] env[62816]: DEBUG nova.scheduler.client.report [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1748.289054] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1748.319018] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1748.319479] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1748.319819] env[62816]: DEBUG 
nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1748.320132] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1748.320443] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1748.321440] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b78c491-1307-4cc6-89e5-1027e89eba30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.330350] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f16ddd80-ab77-4a18-856a-d40dd13df3f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.469059] env[62816]: DEBUG nova.compute.manager [req-cd195605-416c-441f-9ce0-7bfae7b5a94d req-8f5f8450-145e-4d01-b5eb-40fcb0eeaf4d service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Received event network-vif-plugged-e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1748.469327] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd195605-416c-441f-9ce0-7bfae7b5a94d req-8f5f8450-145e-4d01-b5eb-40fcb0eeaf4d service nova] Acquiring lock "d219e1a0-ca18-4315-9178-57953e517936-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.469536] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd195605-416c-441f-9ce0-7bfae7b5a94d req-8f5f8450-145e-4d01-b5eb-40fcb0eeaf4d service nova] Lock "d219e1a0-ca18-4315-9178-57953e517936-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.469660] env[62816]: DEBUG oslo_concurrency.lockutils [req-cd195605-416c-441f-9ce0-7bfae7b5a94d req-8f5f8450-145e-4d01-b5eb-40fcb0eeaf4d service nova] Lock "d219e1a0-ca18-4315-9178-57953e517936-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.469829] env[62816]: DEBUG nova.compute.manager [req-cd195605-416c-441f-9ce0-7bfae7b5a94d req-8f5f8450-145e-4d01-b5eb-40fcb0eeaf4d service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] No waiting events found dispatching network-vif-plugged-e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1748.469994] env[62816]: WARNING nova.compute.manager 
[req-cd195605-416c-441f-9ce0-7bfae7b5a94d req-8f5f8450-145e-4d01-b5eb-40fcb0eeaf4d service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Received unexpected event network-vif-plugged-e28f5e64-f958-4e70-9495-e7ae487028dd for instance with vm_state building and task_state spawning. [ 1748.571417] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Successfully updated port: e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1748.584180] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.584774] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1748.725504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.726234] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.074448] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "refresh_cache-d219e1a0-ca18-4315-9178-57953e517936" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.074606] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "refresh_cache-d219e1a0-ca18-4315-9178-57953e517936" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.077025] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Building network info cache for instance {{(pid=62816) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1749.090108] env[62816]: DEBUG nova.compute.utils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1749.091401] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1749.091573] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1749.131575] env[62816]: DEBUG nova.policy [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0fe4b6013bd4f5dac5bccdbe4683b39', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f53e13df2c6740cd9666b8d60fdbfd87', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1749.228079] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1749.416497] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Successfully created port: 8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1749.595410] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1749.751217] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.751479] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.752973] env[62816]: INFO nova.compute.claims [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1749.832107] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1749.955396] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Updating instance_info_cache with network_info: [{"id": "e28f5e64-f958-4e70-9495-e7ae487028dd", "address": "fa:16:3e:1e:d7:9d", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28f5e64-f9", "ovs_interfaceid": "e28f5e64-f958-4e70-9495-e7ae487028dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.458328] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "refresh_cache-d219e1a0-ca18-4315-9178-57953e517936" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.458730] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Instance network_info: |[{"id": "e28f5e64-f958-4e70-9495-e7ae487028dd", "address": "fa:16:3e:1e:d7:9d", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28f5e64-f9", "ovs_interfaceid": "e28f5e64-f958-4e70-9495-e7ae487028dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1750.459168] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:d7:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e28f5e64-f958-4e70-9495-e7ae487028dd', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1750.466546] env[62816]: DEBUG oslo.service.loopingcall [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.466754] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d219e1a0-ca18-4315-9178-57953e517936] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1750.467402] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-102812bb-762e-4e70-99fa-74ffb91263ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.488286] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1750.488286] env[62816]: value = "task-1788888" [ 1750.488286] env[62816]: _type = "Task" [ 1750.488286] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.495958] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788888, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.501670] env[62816]: DEBUG nova.compute.manager [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Received event network-changed-e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.501670] env[62816]: DEBUG nova.compute.manager [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Refreshing instance network info cache due to event network-changed-e28f5e64-f958-4e70-9495-e7ae487028dd. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1750.501862] env[62816]: DEBUG oslo_concurrency.lockutils [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] Acquiring lock "refresh_cache-d219e1a0-ca18-4315-9178-57953e517936" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.502012] env[62816]: DEBUG oslo_concurrency.lockutils [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] Acquired lock "refresh_cache-d219e1a0-ca18-4315-9178-57953e517936" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.502175] env[62816]: DEBUG nova.network.neutron [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Refreshing network info cache for port e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1750.603939] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1750.629535] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.629819] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.629984] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.630188] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.630457] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.630490] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.630701] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.630851] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1750.631026] env[62816]: DEBUG 
nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.631197] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.631794] env[62816]: DEBUG nova.virt.hardware [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.632288] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c07f8b-fe89-4a4c-9a38-4e7553322ead {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.640922] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97656f2-08d3-404b-9390-b9289d38ffc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.969276] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Successfully updated port: 8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1750.973756] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059f9dfa-6782-4ca5-8f3f-8738d1ecba77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.979714] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4040d6-40e5-4e7a-9157-523a7d946968 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.014580] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1178da0f-895b-4d26-ad37-0426e9addd1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.022701] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788888, 'name': CreateVM_Task, 'duration_secs': 0.375236} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.024615] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d219e1a0-ca18-4315-9178-57953e517936] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1751.025312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.025478] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.025805] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1751.026951] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e379f0-e9ec-4176-86a9-3edec8d6644b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.030696] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570778e5-62d7-4d7c-b1d4-9bb973fed150 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.035036] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1751.035036] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c8c52-a423-e880-64bc-5b12af2373f9" [ 1751.035036] env[62816]: _type = "Task" [ 1751.035036] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.042752] env[62816]: DEBUG nova.compute.provider_tree [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1751.053947] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524c8c52-a423-e880-64bc-5b12af2373f9, 'name': SearchDatastore_Task, 'duration_secs': 0.010658} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.054782] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.054782] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1751.054782] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.054951] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.055887] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1751.055887] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01681f19-63db-4929-a198-f27cfcc7c557 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.063164] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1751.063348] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1751.064207] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36057011-71bd-485c-a83c-9a95a0553f08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.068670] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1751.068670] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522de7ea-aeee-245e-0aa9-d6f34e888bcf" [ 1751.068670] env[62816]: _type = "Task" [ 1751.068670] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.076096] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522de7ea-aeee-245e-0aa9-d6f34e888bcf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.209803] env[62816]: DEBUG nova.network.neutron [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Updated VIF entry in instance network info cache for port e28f5e64-f958-4e70-9495-e7ae487028dd. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1751.210423] env[62816]: DEBUG nova.network.neutron [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Updating instance_info_cache with network_info: [{"id": "e28f5e64-f958-4e70-9495-e7ae487028dd", "address": "fa:16:3e:1e:d7:9d", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape28f5e64-f9", "ovs_interfaceid": "e28f5e64-f958-4e70-9495-e7ae487028dd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1751.476187] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "refresh_cache-56bea284-2871-447b-9bb0-2f57c3053dc0" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.476187] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "refresh_cache-56bea284-2871-447b-9bb0-2f57c3053dc0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.476453] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1751.548023] env[62816]: DEBUG nova.scheduler.client.report [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1751.580412] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522de7ea-aeee-245e-0aa9-d6f34e888bcf, 'name': SearchDatastore_Task, 'duration_secs': 0.008696} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.581293] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ee5d5b8-1b27-408c-9d68-ed09d5edc0e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.586842] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1751.586842] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520ef657-dfe2-2830-1967-a619bb2a96ab" [ 1751.586842] env[62816]: _type = "Task" [ 1751.586842] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.594533] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520ef657-dfe2-2830-1967-a619bb2a96ab, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.713782] env[62816]: DEBUG oslo_concurrency.lockutils [req-9e647db2-17e0-4d0a-8202-f1c9fec7f925 req-d2ad980b-9993-46e6-92aa-89037bda8df1 service nova] Releasing lock "refresh_cache-d219e1a0-ca18-4315-9178-57953e517936" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.006416] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1752.053159] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.053682] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1752.098256] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520ef657-dfe2-2830-1967-a619bb2a96ab, 'name': SearchDatastore_Task, 'duration_secs': 0.00955} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.098526] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.098868] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d219e1a0-ca18-4315-9178-57953e517936/d219e1a0-ca18-4315-9178-57953e517936.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1752.099159] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e610ee37-63b0-4468-ac84-d2041bd4fd93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.106065] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1752.106065] env[62816]: value = "task-1788889" [ 1752.106065] env[62816]: _type = "Task" [ 1752.106065] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.118793] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788889, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.151029] env[62816]: DEBUG nova.network.neutron [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Updating instance_info_cache with network_info: [{"id": "8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b", "address": "fa:16:3e:44:74:65", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bc9f279-2a", "ovs_interfaceid": "8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.529361] env[62816]: DEBUG nova.compute.manager [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Received event network-vif-plugged-8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.529684] env[62816]: DEBUG oslo_concurrency.lockutils [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] Acquiring lock "56bea284-2871-447b-9bb0-2f57c3053dc0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.529829] env[62816]: DEBUG oslo_concurrency.lockutils [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.530239] env[62816]: DEBUG oslo_concurrency.lockutils [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.530439] env[62816]: DEBUG nova.compute.manager [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] No waiting events found dispatching network-vif-plugged-8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b 
{{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1752.530619] env[62816]: WARNING nova.compute.manager [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Received unexpected event network-vif-plugged-8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b for instance with vm_state building and task_state spawning. [ 1752.530780] env[62816]: DEBUG nova.compute.manager [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Received event network-changed-8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.530934] env[62816]: DEBUG nova.compute.manager [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Refreshing instance network info cache due to event network-changed-8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1752.531118] env[62816]: DEBUG oslo_concurrency.lockutils [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] Acquiring lock "refresh_cache-56bea284-2871-447b-9bb0-2f57c3053dc0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.558910] env[62816]: DEBUG nova.compute.utils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1752.560898] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1752.561214] env[62816]: DEBUG nova.network.neutron [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1752.604405] env[62816]: DEBUG nova.policy [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31efed5435754520bf3a18efbac808b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10a1ee60e491412dbcdf9e2203ae7000', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1752.615830] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788889, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.654325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "refresh_cache-56bea284-2871-447b-9bb0-2f57c3053dc0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.654703] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Instance network_info: |[{"id": "8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b", "address": "fa:16:3e:44:74:65", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bc9f279-2a", "ovs_interfaceid": "8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1752.655075] env[62816]: DEBUG oslo_concurrency.lockutils [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] Acquired lock "refresh_cache-56bea284-2871-447b-9bb0-2f57c3053dc0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.655287] env[62816]: DEBUG nova.network.neutron [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Refreshing network info cache for port 8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.656774] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:44:74:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '578f2ebc-5719-4d31-9bac-d3d247f9293f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1752.664855] env[62816]: DEBUG oslo.service.loopingcall [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 
tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1752.666240] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1752.666471] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7237e381-4ccc-4cde-95c8-4681cf82841b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.687848] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1752.687848] env[62816]: value = "task-1788890" [ 1752.687848] env[62816]: _type = "Task" [ 1752.687848] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.699261] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788890, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.863454] env[62816]: DEBUG nova.network.neutron [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Successfully created port: 75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1753.061692] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1753.116668] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788889, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595965} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.116951] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d219e1a0-ca18-4315-9178-57953e517936/d219e1a0-ca18-4315-9178-57953e517936.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1753.117179] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1753.117420] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-081fd538-12e6-4672-b8fc-30063397d9b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.123165] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1753.123165] env[62816]: value = "task-1788891" [ 1753.123165] env[62816]: _type = "Task" [ 1753.123165] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.131285] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788891, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.197843] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788890, 'name': CreateVM_Task, 'duration_secs': 0.418434} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.200168] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1753.200841] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.201032] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.201316] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1753.201843] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea0eda2f-76f8-4949-8c07-fc7b7cb7d326 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.206367] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1753.206367] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52da4c4c-8a55-9cab-f758-5c3ab7efb3ef" [ 1753.206367] env[62816]: _type = "Task" [ 1753.206367] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.214631] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52da4c4c-8a55-9cab-f758-5c3ab7efb3ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.378924] env[62816]: DEBUG nova.network.neutron [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Updated VIF entry in instance network info cache for port 8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1753.379318] env[62816]: DEBUG nova.network.neutron [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Updating instance_info_cache with network_info: [{"id": "8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b", "address": "fa:16:3e:44:74:65", "network": {"id": "b7dafaea-4b50-45bd-9179-5c7fb39be765", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1377111076-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f53e13df2c6740cd9666b8d60fdbfd87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "578f2ebc-5719-4d31-9bac-d3d247f9293f", "external-id": "nsx-vlan-transportzone-58", "segmentation_id": 58, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bc9f279-2a", "ovs_interfaceid": "8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1753.634443] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788891, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069495} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.634781] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1753.635490] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504db1b9-620b-4785-a50f-0ceebcc4e5bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.656863] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] d219e1a0-ca18-4315-9178-57953e517936/d219e1a0-ca18-4315-9178-57953e517936.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1753.657110] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3079e3f-6b89-41fa-be64-e06738ca87d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.675963] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1753.675963] env[62816]: value = "task-1788892" [ 1753.675963] env[62816]: _type = "Task" [ 1753.675963] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.683282] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788892, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.716714] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52da4c4c-8a55-9cab-f758-5c3ab7efb3ef, 'name': SearchDatastore_Task, 'duration_secs': 0.009726} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.716714] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1753.716714] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1753.717063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1753.717063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1753.717159] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1753.717331] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6650d18b-3223-4c5c-8d4c-5cd6e903d4d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.725373] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1753.725549] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1753.726241] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d99cf9a-ec00-4f4d-a967-3d581dde565f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.731138] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1753.731138] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526d7401-7d46-dd26-a7b8-2a2706065ea0" [ 1753.731138] env[62816]: _type = "Task" [ 1753.731138] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.738421] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526d7401-7d46-dd26-a7b8-2a2706065ea0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.881859] env[62816]: DEBUG oslo_concurrency.lockutils [req-aae6b3ed-6bc3-4241-8612-172fa7b1de44 req-7bdfdab0-ac3f-4f58-a85c-fb7d497dcc01 service nova] Releasing lock "refresh_cache-56bea284-2871-447b-9bb0-2f57c3053dc0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.070873] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1754.097124] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1754.097493] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1754.097571] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1754.097724] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1754.097872] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1754.098037] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1754.098258] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1754.098421] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1754.098588] env[62816]: DEBUG nova.virt.hardware [None 
req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1754.098804] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1754.098999] env[62816]: DEBUG nova.virt.hardware [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1754.099838] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc1aed9-943c-4e5f-a47a-4cc66eb08d3f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.107275] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e956f959-350f-41a3-8912-e69762af1807 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.185831] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788892, 'name': ReconfigVM_Task, 'duration_secs': 0.277238} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.186135] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Reconfigured VM instance instance-00000054 to attach disk [datastore1] d219e1a0-ca18-4315-9178-57953e517936/d219e1a0-ca18-4315-9178-57953e517936.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1754.186796] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3396cfd-213e-46ce-a489-15cf02013b28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.193749] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1754.193749] env[62816]: value = "task-1788893" [ 1754.193749] env[62816]: _type = "Task" [ 1754.193749] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.201876] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788893, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.240882] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526d7401-7d46-dd26-a7b8-2a2706065ea0, 'name': SearchDatastore_Task, 'duration_secs': 0.008444} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.241674] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30544486-26cd-4332-85ff-f282cb28842e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.246848] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1754.246848] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e57b17-4afb-14bd-887b-2d8cdbc1e517" [ 1754.246848] env[62816]: _type = "Task" [ 1754.246848] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.254482] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e57b17-4afb-14bd-887b-2d8cdbc1e517, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.370990] env[62816]: DEBUG nova.network.neutron [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Successfully updated port: 75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1754.554829] env[62816]: DEBUG nova.compute.manager [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Received event network-vif-plugged-75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1754.555211] env[62816]: DEBUG oslo_concurrency.lockutils [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] Acquiring lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.555471] env[62816]: DEBUG oslo_concurrency.lockutils [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.555652] env[62816]: DEBUG oslo_concurrency.lockutils [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] Lock 
"bf0a39dc-634b-4ace-b11c-ebf50ef9b86b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.555824] env[62816]: DEBUG nova.compute.manager [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] No waiting events found dispatching network-vif-plugged-75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1754.555993] env[62816]: WARNING nova.compute.manager [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Received unexpected event network-vif-plugged-75829904-7a65-435d-a888-10e80563e07e for instance with vm_state building and task_state spawning. [ 1754.556175] env[62816]: DEBUG nova.compute.manager [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Received event network-changed-75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1754.556322] env[62816]: DEBUG nova.compute.manager [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Refreshing instance network info cache due to event network-changed-75829904-7a65-435d-a888-10e80563e07e. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1754.556505] env[62816]: DEBUG oslo_concurrency.lockutils [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] Acquiring lock "refresh_cache-bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.556643] env[62816]: DEBUG oslo_concurrency.lockutils [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] Acquired lock "refresh_cache-bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.556798] env[62816]: DEBUG nova.network.neutron [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Refreshing network info cache for port 75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1754.707639] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788893, 'name': Rename_Task, 'duration_secs': 0.133295} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.707968] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1754.708194] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6d24c06f-307d-4728-9a22-20e3606ae203 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.716586] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1754.716586] env[62816]: value = "task-1788894" [ 1754.716586] env[62816]: _type = "Task" [ 1754.716586] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.724771] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788894, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.756482] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e57b17-4afb-14bd-887b-2d8cdbc1e517, 'name': SearchDatastore_Task, 'duration_secs': 0.008845} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.756911] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.757244] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 56bea284-2871-447b-9bb0-2f57c3053dc0/56bea284-2871-447b-9bb0-2f57c3053dc0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1754.757646] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fab6d8ca-3293-4594-bd7c-6b18eba4e1ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.764566] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1754.764566] env[62816]: value = "task-1788895" [ 1754.764566] env[62816]: _type = "Task" [ 1754.764566] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.775842] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788895, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.874119] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "refresh_cache-bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.091520] env[62816]: DEBUG nova.network.neutron [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1755.179108] env[62816]: DEBUG nova.network.neutron [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1755.227419] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788894, 'name': PowerOnVM_Task, 'duration_secs': 0.507076} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.228067] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1755.228463] env[62816]: INFO nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Took 6.94 seconds to spawn the instance on the hypervisor. [ 1755.228817] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1755.229808] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60061219-2d33-402d-872f-a2b3d4034f29 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.275583] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788895, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489066} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.275583] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 56bea284-2871-447b-9bb0-2f57c3053dc0/56bea284-2871-447b-9bb0-2f57c3053dc0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1755.275796] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1755.275926] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-038cd8ca-2ae9-486b-a771-5ccddf24b554 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.281991] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1755.281991] env[62816]: value = "task-1788896" [ 1755.281991] env[62816]: _type = "Task" [ 1755.281991] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.290351] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788896, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.681702] env[62816]: DEBUG oslo_concurrency.lockutils [req-6c353881-a416-434f-854e-4f37f7f8b5a7 req-8f823eef-24cb-495d-976a-03c8126aa38c service nova] Releasing lock "refresh_cache-bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.682120] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquired lock "refresh_cache-bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.682289] env[62816]: DEBUG nova.network.neutron [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1755.749066] env[62816]: INFO nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Took 12.06 seconds to build instance. [ 1755.792508] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.056151} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.792825] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1755.793724] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa88dfa-90c2-4209-b50e-ca107128e863 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.815733] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 56bea284-2871-447b-9bb0-2f57c3053dc0/56bea284-2871-447b-9bb0-2f57c3053dc0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1755.815984] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5eb2e554-eb31-46c4-8520-d830e52474c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.834443] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1755.834443] env[62816]: value = "task-1788897" [ 1755.834443] env[62816]: _type = "Task" [ 1755.834443] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.845079] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788897, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.214709] env[62816]: DEBUG nova.network.neutron [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1756.250993] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "d219e1a0-ca18-4315-9178-57953e517936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.568s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1756.345218] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788897, 'name': ReconfigVM_Task, 'duration_secs': 0.260529} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.345500] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 56bea284-2871-447b-9bb0-2f57c3053dc0/56bea284-2871-447b-9bb0-2f57c3053dc0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1756.346116] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bb5a14ec-684d-46db-b0e6-f819ce573c0e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.353192] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1756.353192] env[62816]: value = "task-1788898" [ 1756.353192] env[62816]: _type = "Task" [ 1756.353192] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.361014] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788898, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.592802] env[62816]: DEBUG nova.network.neutron [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Updating instance_info_cache with network_info: [{"id": "75829904-7a65-435d-a888-10e80563e07e", "address": "fa:16:3e:b2:43:50", "network": {"id": "3c15f25e-5f0f-4013-8e80-67f844e212b7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-430860096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10a1ee60e491412dbcdf9e2203ae7000", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75829904-7a", "ovs_interfaceid": "75829904-7a65-435d-a888-10e80563e07e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.863248] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788898, 'name': Rename_Task, 'duration_secs': 
0.143483} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.863552] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1756.863777] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-94139a95-8218-424a-ad76-ad2653a7d85c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.870460] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1756.870460] env[62816]: value = "task-1788899" [ 1756.870460] env[62816]: _type = "Task" [ 1756.870460] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.877656] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788899, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.095659] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Releasing lock "refresh_cache-bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.096068] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Instance network_info: |[{"id": "75829904-7a65-435d-a888-10e80563e07e", "address": "fa:16:3e:b2:43:50", "network": {"id": "3c15f25e-5f0f-4013-8e80-67f844e212b7", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-430860096-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10a1ee60e491412dbcdf9e2203ae7000", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13e71dbb-4279-427c-b39d-ba5df9895e58", "external-id": "nsx-vlan-transportzone-417", "segmentation_id": 417, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75829904-7a", "ovs_interfaceid": "75829904-7a65-435d-a888-10e80563e07e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1757.096511] env[62816]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:43:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13e71dbb-4279-427c-b39d-ba5df9895e58', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75829904-7a65-435d-a888-10e80563e07e', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1757.104172] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Creating folder: Project (10a1ee60e491412dbcdf9e2203ae7000). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1757.104437] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a28fcec1-958a-4a6e-b5b0-43953315a0cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.115207] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Created folder: Project (10a1ee60e491412dbcdf9e2203ae7000) in parent group-v370905. [ 1757.115396] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Creating folder: Instances. Parent ref: group-v371143. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1757.115621] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-767cd30a-1c2d-4d6d-bf9d-2265d741d651 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.124109] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Created folder: Instances in parent group-v371143. [ 1757.124336] env[62816]: DEBUG oslo.service.loopingcall [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1757.124517] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1757.124705] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3fd242f4-ade4-4bff-b83e-d29410dbd867 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.143849] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1757.143849] env[62816]: value = "task-1788902" [ 1757.143849] env[62816]: _type = "Task" [ 1757.143849] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.151392] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788902, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.380089] env[62816]: DEBUG oslo_vmware.api [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788899, 'name': PowerOnVM_Task, 'duration_secs': 0.431782} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.380435] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1757.380659] env[62816]: INFO nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Took 6.78 seconds to spawn the instance on the hypervisor. [ 1757.380869] env[62816]: DEBUG nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1757.381690] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da4494d8-dad7-42ce-a68a-e0c77450f887 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.654128] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788902, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.903784] env[62816]: INFO nova.compute.manager [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Took 14.19 seconds to build instance. [ 1758.154352] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788902, 'name': CreateVM_Task, 'duration_secs': 0.544444} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.154489] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1758.155178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.155412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.155738] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1758.155996] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2dd95bd-8982-45df-b525-b89e1a0b27d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.160629] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1758.160629] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520fdb29-d5c5-ebad-1899-2b2ad3d55946" [ 1758.160629] env[62816]: _type = "Task" [ 1758.160629] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.168657] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520fdb29-d5c5-ebad-1899-2b2ad3d55946, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.407484] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43e0ac0a-605f-4132-b96c-be006db661f0 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.700s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.672861] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520fdb29-d5c5-ebad-1899-2b2ad3d55946, 'name': SearchDatastore_Task, 'duration_secs': 0.009552} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.673200] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1758.673441] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1758.673682] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.673832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.674026] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1758.674291] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5b820672-1e68-4349-84d8-8ce7b6f8408b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.682776] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1758.682893] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1758.683662] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-048c06c4-2c7d-4e6f-9caa-af4211bbe62a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.688666] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1758.688666] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525352a6-b58d-b92e-83f0-5750f68fb80b" [ 1758.688666] env[62816]: _type = "Task" [ 1758.688666] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.697644] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525352a6-b58d-b92e-83f0-5750f68fb80b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.199594] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525352a6-b58d-b92e-83f0-5750f68fb80b, 'name': SearchDatastore_Task, 'duration_secs': 0.008326} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.200450] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06a21cf1-f039-4856-bb3e-c9835b788a04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.205373] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1759.205373] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525fe5a2-45f4-953b-e930-afe5a7cee51e" [ 1759.205373] env[62816]: _type = "Task" [ 1759.205373] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.212846] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525fe5a2-45f4-953b-e930-afe5a7cee51e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.329621] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "d219e1a0-ca18-4315-9178-57953e517936" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.329900] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "d219e1a0-ca18-4315-9178-57953e517936" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.330138] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "d219e1a0-ca18-4315-9178-57953e517936-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.330329] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "d219e1a0-ca18-4315-9178-57953e517936-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.330503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "d219e1a0-ca18-4315-9178-57953e517936-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.332621] env[62816]: INFO nova.compute.manager [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Terminating instance [ 1759.334344] env[62816]: DEBUG nova.compute.manager [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1759.334538] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1759.335418] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1b40bb-1275-4919-9d84-a7f3135f91a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.342949] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1759.343179] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd91e06a-fdeb-496b-974e-c1945a1a9ada {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.349375] env[62816]: DEBUG oslo_vmware.api [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1759.349375] env[62816]: value = "task-1788903" [ 1759.349375] env[62816]: _type = "Task" [ 1759.349375] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.356993] env[62816]: DEBUG oslo_vmware.api [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788903, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.394731] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "56bea284-2871-447b-9bb0-2f57c3053dc0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.395058] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.395293] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "56bea284-2871-447b-9bb0-2f57c3053dc0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.395495] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.395674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.397820] env[62816]: INFO nova.compute.manager [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Terminating instance [ 1759.399681] env[62816]: DEBUG nova.compute.manager [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1759.399882] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1759.400724] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c86dfe5-a2b1-4ed2-920e-d5285566c2f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.408447] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1759.408688] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-108cbe6f-6f2f-4ade-8bca-b435dff43673 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.415700] env[62816]: DEBUG oslo_vmware.api [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1759.415700] env[62816]: value = "task-1788904" [ 1759.415700] env[62816]: _type = "Task" [ 1759.415700] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.423529] env[62816]: DEBUG oslo_vmware.api [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.715743] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525fe5a2-45f4-953b-e930-afe5a7cee51e, 'name': SearchDatastore_Task, 'duration_secs': 0.00908} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.716026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.716301] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] bf0a39dc-634b-4ace-b11c-ebf50ef9b86b/bf0a39dc-634b-4ace-b11c-ebf50ef9b86b.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1759.716569] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-245d0cae-06b2-4d0e-a4f0-0435e53f9efd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.723481] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1759.723481] env[62816]: value = "task-1788905" [ 1759.723481] env[62816]: _type = "Task" [ 1759.723481] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.731182] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788905, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.861038] env[62816]: DEBUG oslo_vmware.api [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788903, 'name': PowerOffVM_Task, 'duration_secs': 0.201268} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.861038] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1759.861038] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1759.861038] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46496a4f-3e52-4c1e-817d-7beaf2438c9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.896013] env[62816]: DEBUG oslo_concurrency.lockutils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1759.896297] env[62816]: DEBUG oslo_concurrency.lockutils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.928662] env[62816]: DEBUG oslo_vmware.api [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788904, 'name': PowerOffVM_Task, 'duration_secs': 0.218443} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.928989] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1759.929189] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1759.929462] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb7aecf1-6b33-4d80-ad5d-d892fdf8a559 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.944902] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1759.945137] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1759.945272] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleting the datastore file [datastore1] d219e1a0-ca18-4315-9178-57953e517936 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1759.945536] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9cc9e18b-6b6b-449b-a593-dd198f05689b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.952995] env[62816]: DEBUG oslo_vmware.api [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1759.952995] env[62816]: value = "task-1788908" [ 1759.952995] env[62816]: _type = "Task" [ 1759.952995] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.962883] env[62816]: DEBUG oslo_vmware.api [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788908, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.086210] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1760.086540] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1760.086695] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleting the datastore file [datastore1] 56bea284-2871-447b-9bb0-2f57c3053dc0 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1760.086967] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33fd2749-8b5d-4b68-acbe-0cbc0b0e7049 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.094660] env[62816]: DEBUG oslo_vmware.api [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for the task: (returnval){ [ 1760.094660] env[62816]: value = "task-1788909" [ 1760.094660] env[62816]: _type = "Task" [ 1760.094660] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.104493] env[62816]: DEBUG oslo_vmware.api [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788909, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.233773] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470747} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.234145] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] bf0a39dc-634b-4ace-b11c-ebf50ef9b86b/bf0a39dc-634b-4ace-b11c-ebf50ef9b86b.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1760.234383] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1760.234660] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9925816-a888-40b0-9085-345fa76b3a41 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.241747] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1760.241747] env[62816]: value = "task-1788910" [ 1760.241747] env[62816]: _type = "Task" [ 1760.241747] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.249553] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788910, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.399171] env[62816]: DEBUG nova.compute.utils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1760.462869] env[62816]: DEBUG oslo_vmware.api [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788908, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.351311} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.463146] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1760.463341] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1760.463521] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1760.463698] env[62816]: INFO nova.compute.manager [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: d219e1a0-ca18-4315-9178-57953e517936] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1760.463935] env[62816]: DEBUG oslo.service.loopingcall [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1760.464136] env[62816]: DEBUG nova.compute.manager [-] [instance: d219e1a0-ca18-4315-9178-57953e517936] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1760.464232] env[62816]: DEBUG nova.network.neutron [-] [instance: d219e1a0-ca18-4315-9178-57953e517936] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1760.604668] env[62816]: DEBUG oslo_vmware.api [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Task: {'id': task-1788909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22046} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.604794] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1760.604958] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1760.608681] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1760.608923] env[62816]: INFO nova.compute.manager [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1760.609202] env[62816]: DEBUG oslo.service.loopingcall [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1760.609411] env[62816]: DEBUG nova.compute.manager [-] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1760.609507] env[62816]: DEBUG nova.network.neutron [-] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1760.712426] env[62816]: DEBUG nova.objects.instance [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lazy-loading 'flavor' on Instance uuid 0dbf907f-0313-435c-a8be-19f7e48ded76 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.751356] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788910, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084118} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.751479] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1760.752136] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b1255f-82cb-4062-b0b8-cb62a773579a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.774122] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] bf0a39dc-634b-4ace-b11c-ebf50ef9b86b/bf0a39dc-634b-4ace-b11c-ebf50ef9b86b.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1760.775446] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f28df20-a20f-40b4-9876-3fbfcc3c4e8b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.790445] env[62816]: DEBUG nova.compute.manager [req-052caf9e-be3e-44ba-a161-bce97c22b236 req-3e4bbab2-f1e3-4efc-9ab8-7fb608eb2604 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Received event network-vif-deleted-e28f5e64-f958-4e70-9495-e7ae487028dd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1760.790665] env[62816]: INFO nova.compute.manager [req-052caf9e-be3e-44ba-a161-bce97c22b236 req-3e4bbab2-f1e3-4efc-9ab8-7fb608eb2604 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Neutron deleted interface e28f5e64-f958-4e70-9495-e7ae487028dd; detaching it from the instance and deleting it from the info cache [ 1760.790840] env[62816]: DEBUG nova.network.neutron [req-052caf9e-be3e-44ba-a161-bce97c22b236 req-3e4bbab2-f1e3-4efc-9ab8-7fb608eb2604 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.797577] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1760.797577] env[62816]: value = "task-1788911" [ 1760.797577] env[62816]: _type = "Task" [ 1760.797577] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.807553] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788911, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.849947] env[62816]: DEBUG nova.compute.manager [req-a3a0a245-3551-49e7-b800-b0b379e60da7 req-0c8d5eb7-3e50-4b7e-b99a-4b12ed7682d0 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Received event network-vif-deleted-8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1760.849947] env[62816]: INFO nova.compute.manager [req-a3a0a245-3551-49e7-b800-b0b379e60da7 req-0c8d5eb7-3e50-4b7e-b99a-4b12ed7682d0 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Neutron deleted interface 8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b; detaching it from the instance and deleting it from the info cache [ 1760.850142] env[62816]: DEBUG nova.network.neutron [req-a3a0a245-3551-49e7-b800-b0b379e60da7 req-0c8d5eb7-3e50-4b7e-b99a-4b12ed7682d0 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.901868] env[62816]: DEBUG oslo_concurrency.lockutils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.217075] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.217256] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.238534] env[62816]: DEBUG nova.network.neutron [-] [instance: d219e1a0-ca18-4315-9178-57953e517936] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.293164] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-30aee6dc-93df-470b-94e1-a158bbe83dba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.306118] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d64e1c-804c-4c78-afd9-ec37401c45ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.321750] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788911, 'name': ReconfigVM_Task, 'duration_secs': 0.255158} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.322232] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Reconfigured VM instance instance-00000056 to attach disk [datastore1] bf0a39dc-634b-4ace-b11c-ebf50ef9b86b/bf0a39dc-634b-4ace-b11c-ebf50ef9b86b.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1761.322900] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93573ea8-c89e-47a6-baf1-ea587e4d618b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.326898] env[62816]: DEBUG nova.network.neutron [-] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.340047] env[62816]: DEBUG nova.compute.manager [req-052caf9e-be3e-44ba-a161-bce97c22b236 req-3e4bbab2-f1e3-4efc-9ab8-7fb608eb2604 service nova] [instance: d219e1a0-ca18-4315-9178-57953e517936] Detach interface failed, port_id=e28f5e64-f958-4e70-9495-e7ae487028dd, reason: Instance d219e1a0-ca18-4315-9178-57953e517936 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1761.340541] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1761.340541] env[62816]: value = "task-1788912" [ 1761.340541] env[62816]: _type = "Task" [ 1761.340541] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.341535] env[62816]: INFO nova.compute.manager [-] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Took 0.73 seconds to deallocate network for instance. [ 1761.352972] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d348b362-cb29-4945-b8fb-3e2f3319bacf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.357697] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788912, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.364403] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3443474-9c86-48a8-a603-6cb93dc9d6ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.397099] env[62816]: DEBUG nova.compute.manager [req-a3a0a245-3551-49e7-b800-b0b379e60da7 req-0c8d5eb7-3e50-4b7e-b99a-4b12ed7682d0 service nova] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Detach interface failed, port_id=8bc9f279-2ab1-4c1a-b5f3-992d33a0fe6b, reason: Instance 56bea284-2871-447b-9bb0-2f57c3053dc0 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1761.612367] env[62816]: DEBUG nova.network.neutron [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1761.741408] env[62816]: INFO nova.compute.manager [-] [instance: d219e1a0-ca18-4315-9178-57953e517936] Took 1.28 seconds to deallocate network for instance. [ 1761.853878] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.854170] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.854397] env[62816]: DEBUG nova.objects.instance [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lazy-loading 'resources' on Instance uuid 56bea284-2871-447b-9bb0-2f57c3053dc0 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1761.855457] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788912, 'name': Rename_Task, 'duration_secs': 0.124539} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.855893] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1761.856142] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae50894d-6bd1-432a-9732-7b69c0573273 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.862394] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1761.862394] env[62816]: value = "task-1788913" [ 1761.862394] env[62816]: _type = "Task" [ 1761.862394] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.869988] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788913, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.976800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.977075] env[62816]: DEBUG oslo_concurrency.lockutils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.977340] env[62816]: INFO nova.compute.manager [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Attaching volume 7d451747-ece2-4770-9e36-d259b810e2df to /dev/sdb [ 1762.009034] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd898fc-34ee-4f8b-8bb2-3ac292ac7f73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.015713] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cbd0cc-37d0-4994-8333-b1530d0e26e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.028669] env[62816]: DEBUG nova.virt.block_device [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updating existing volume attachment record: bd53de02-53f3-4b91-8e35-b32842caba1c {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1762.248168] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.371818] env[62816]: DEBUG oslo_vmware.api [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788913, 'name': PowerOnVM_Task, 'duration_secs': 0.431272} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1762.372111] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1762.372316] env[62816]: INFO nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1762.372492] env[62816]: DEBUG nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1762.373311] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c023e2e4-1d6a-4a6f-aa4e-efe65a5dd6b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.389967] env[62816]: DEBUG nova.network.neutron [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.580047] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4268200-e419-4a68-8a3f-76f3d0cdb2dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.587975] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0ee59d0b-7fd1-4566-86e3-7a4d515719c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.618540] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dca4d6b-3104-4389-b5fc-e7f5978ba2dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.626381] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8d7160-009b-4093-aa84-1f6fe9db832c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.639948] env[62816]: DEBUG nova.compute.provider_tree [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1762.875594] env[62816]: DEBUG nova.compute.manager [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received event network-changed-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1762.875892] env[62816]: DEBUG nova.compute.manager [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing instance network info cache due to event network-changed-fe984819-7451-4e21-be74-349cfccd5318. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1762.876188] env[62816]: DEBUG oslo_concurrency.lockutils [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1762.891229] env[62816]: INFO nova.compute.manager [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Took 13.16 seconds to build instance. 
[ 1762.892294] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.892474] env[62816]: DEBUG nova.compute.manager [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Inject network info {{(pid=62816) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1762.892849] env[62816]: DEBUG nova.compute.manager [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] network_info to inject: |[{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1762.897700] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Reconfiguring VM instance to set the machine id {{(pid=62816) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1762.899239] env[62816]: DEBUG oslo_concurrency.lockutils [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.900336] env[62816]: DEBUG nova.network.neutron [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing network info cache for port fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1762.901313] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-142f22f0-e445-4959-aa34-91ef65aa1bfb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.917809] env[62816]: DEBUG oslo_vmware.api [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1762.917809] env[62816]: value = "task-1788917" [ 1762.917809] env[62816]: _type = "Task" [ 1762.917809] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.927230] env[62816]: DEBUG oslo_vmware.api [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788917, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.935386] env[62816]: DEBUG nova.objects.instance [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lazy-loading 'flavor' on Instance uuid 0dbf907f-0313-435c-a8be-19f7e48ded76 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.143438] env[62816]: DEBUG nova.scheduler.client.report [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1763.280261] env[62816]: INFO nova.compute.manager [None req-55dced58-98b2-44af-ae45-e0d6fb44b118 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Get console output [ 1763.280895] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-55dced58-98b2-44af-ae45-e0d6fb44b118 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] The console log is missing. 
Check your VSPC configuration [ 1763.400242] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a02b5148-8105-4cf8-b350-362c4df0a299 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.674s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.430534] env[62816]: DEBUG oslo_vmware.api [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788917, 'name': ReconfigVM_Task, 'duration_secs': 0.165411} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.430969] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed1c1b70-5014-47a1-af83-1259e9483e67 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Reconfigured VM instance to set the machine id {{(pid=62816) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1763.439917] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.648312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.650643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.403s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.650791] env[62816]: DEBUG nova.objects.instance [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lazy-loading 'resources' on Instance uuid d219e1a0-ca18-4315-9178-57953e517936 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.659230] env[62816]: DEBUG nova.network.neutron [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updated VIF entry in instance network info cache for port fe984819-7451-4e21-be74-349cfccd5318. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1763.659678] env[62816]: DEBUG nova.network.neutron [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.673316] env[62816]: INFO nova.scheduler.client.report [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted allocations for instance 56bea284-2871-447b-9bb0-2f57c3053dc0 [ 1764.162266] env[62816]: DEBUG oslo_concurrency.lockutils [req-3b3d0fa0-7785-4fa5-a442-ba5ca6ccaffb req-801d52bb-ee3f-4b63-bd83-0e0423f42eaa service nova] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.162659] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.180528] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c803fd34-58c7-4662-822f-8a8c074bcb1c tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "56bea284-2871-447b-9bb0-2f57c3053dc0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.785s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.236432] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.236763] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.237030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.237240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.238249] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.240399] env[62816]: INFO nova.compute.manager [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Terminating instance [ 1764.242208] env[62816]: DEBUG nova.compute.manager [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1764.242407] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1764.244027] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4b6465-24cd-4668-9036-b228d61bb614 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.251517] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1764.251759] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-693de530-36e0-4ef6-91b3-526d77851041 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.258098] env[62816]: DEBUG oslo_vmware.api [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1764.258098] env[62816]: value = "task-1788918" [ 1764.258098] env[62816]: _type = "Task" [ 1764.258098] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.268379] env[62816]: DEBUG oslo_vmware.api [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788918, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.346776] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "f9d9593a-1c25-47a1-98fd-4462a851f134" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.347155] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.347368] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "f9d9593a-1c25-47a1-98fd-4462a851f134-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.347589] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.347761] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.352421] env[62816]: INFO nova.compute.manager [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Terminating instance [ 1764.356949] env[62816]: DEBUG nova.compute.manager [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1764.357114] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1764.358011] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08964119-a3d1-4793-814b-fc65d5c59067 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.371263] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1764.371576] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0d5babd-762c-4589-8492-810318cc92dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.377672] env[62816]: DEBUG oslo_vmware.api [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1764.377672] env[62816]: value = "task-1788919" [ 1764.377672] env[62816]: _type = "Task" [ 1764.377672] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.385496] env[62816]: DEBUG oslo_vmware.api [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788919, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.450051] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05dd763-6576-4d54-abfa-7e1383ff56b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.458042] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1764.458042] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1764.458042] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1764.458042] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1764.463986] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6599d28-8ede-4b0e-8751-8e23640b504f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.496155] env[62816]: DEBUG nova.network.neutron [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1764.499174] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c31740-6750-41ab-9a59-1a3515be08ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.506945] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bc3760-6eb4-4277-950b-15d5bd4ef958 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.522114] env[62816]: DEBUG nova.compute.provider_tree [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1764.768567] env[62816]: DEBUG oslo_vmware.api [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788918, 'name': PowerOffVM_Task, 'duration_secs': 0.193852} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.768847] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1764.769058] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1764.769322] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-026c0f04-9722-45a1-a263-cab16adad5ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.888530] env[62816]: DEBUG oslo_vmware.api [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788919, 'name': PowerOffVM_Task, 'duration_secs': 0.175397} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.888639] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1764.888799] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1764.889132] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ade877de-ed57-401f-8716-df6959dc50b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.900685] env[62816]: DEBUG nova.compute.manager [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received event network-changed-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1764.900876] env[62816]: DEBUG nova.compute.manager [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing instance network info cache due to event network-changed-fe984819-7451-4e21-be74-349cfccd5318. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1764.901094] env[62816]: DEBUG oslo_concurrency.lockutils [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.916678] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1764.916935] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1764.917079] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Deleting the datastore file [datastore1] bf0a39dc-634b-4ace-b11c-ebf50ef9b86b {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1764.917403] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7f9d176-b3bd-40b8-a8f3-65f8979e0539 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.924988] env[62816]: DEBUG oslo_vmware.api [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for the task: (returnval){ [ 1764.924988] env[62816]: value = "task-1788923" [ 1764.924988] env[62816]: _type = "Task" [ 1764.924988] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.932999] env[62816]: DEBUG oslo_vmware.api [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788923, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.963646] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Skipping network cache update for instance because it is being deleted. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1764.963871] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Skipping network cache update for instance because it is being deleted. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 1764.967192] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1764.967412] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1764.967595] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleting the datastore file [datastore1] f9d9593a-1c25-47a1-98fd-4462a851f134 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1764.968179] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74e8c850-0355-4d95-b960-db808562d342 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.974187] env[62816]: DEBUG oslo_vmware.api [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1764.974187] env[62816]: value = "task-1788924" [ 1764.974187] env[62816]: _type = "Task" [ 1764.974187] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.981651] env[62816]: DEBUG oslo_vmware.api [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788924, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.992694] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.025023] env[62816]: DEBUG nova.scheduler.client.report [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1765.210676] env[62816]: DEBUG nova.network.neutron [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1765.434319] env[62816]: DEBUG oslo_vmware.api [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Task: {'id': task-1788923, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162953} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.434592] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1765.434771] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1765.434950] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1765.435141] env[62816]: INFO nova.compute.manager [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1765.435380] env[62816]: DEBUG oslo.service.loopingcall [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.435569] env[62816]: DEBUG nova.compute.manager [-] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1765.435658] env[62816]: DEBUG nova.network.neutron [-] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1765.485494] env[62816]: DEBUG oslo_vmware.api [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1671} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.485746] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1765.485938] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1765.486126] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1765.486303] env[62816]: INFO nova.compute.manager [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1765.486593] env[62816]: DEBUG oslo.service.loopingcall [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1765.486710] env[62816]: DEBUG nova.compute.manager [-] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1765.486801] env[62816]: DEBUG nova.network.neutron [-] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1765.529857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.879s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.559174] env[62816]: INFO nova.scheduler.client.report [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Deleted allocations for instance d219e1a0-ca18-4315-9178-57953e517936 [ 1765.717149] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.717149] env[62816]: DEBUG nova.compute.manager [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Inject network info {{(pid=62816) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1765.717149] env[62816]: DEBUG nova.compute.manager [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] network_info to inject: |[{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=62816) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1765.725225] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Reconfiguring VM instance to set the machine id {{(pid=62816) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1765.726322] env[62816]: DEBUG oslo_concurrency.lockutils [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.726514] env[62816]: DEBUG nova.network.neutron [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Refreshing network info cache for port fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1765.727560] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-408ca220-7826-4a47-974f-05d0a14eaf9d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.746947] env[62816]: DEBUG oslo_vmware.api [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1765.746947] env[62816]: value = "task-1788925" [ 1765.746947] env[62816]: _type = "Task" [ 1765.746947] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.755519] env[62816]: DEBUG oslo_vmware.api [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.997186] env[62816]: DEBUG nova.network.neutron [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updated VIF entry in instance network info cache for port fe984819-7451-4e21-be74-349cfccd5318. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1765.997639] env[62816]: DEBUG nova.network.neutron [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.052013] env[62816]: DEBUG nova.compute.manager [req-fbd673f6-e5e3-4a43-8266-4ee2e0a19553 req-374cf128-5432-46c8-9e59-5d97f2739980 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Received event network-vif-deleted-0aebe84d-1c20-4011-90d2-8e7f579b4b29 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1766.052292] env[62816]: INFO nova.compute.manager [req-fbd673f6-e5e3-4a43-8266-4ee2e0a19553 req-374cf128-5432-46c8-9e59-5d97f2739980 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Neutron deleted interface 0aebe84d-1c20-4011-90d2-8e7f579b4b29; detaching it from the instance and deleting it from the info cache [ 1766.052471] env[62816]: DEBUG nova.network.neutron [req-fbd673f6-e5e3-4a43-8266-4ee2e0a19553 req-374cf128-5432-46c8-9e59-5d97f2739980 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.065823] env[62816]: DEBUG oslo_concurrency.lockutils [None req-af33f4b5-ccd0-4a92-a70f-e2ec208d9633 tempest-MultipleCreateTestJSON-1865147490 tempest-MultipleCreateTestJSON-1865147490-project-member] Lock "d219e1a0-ca18-4315-9178-57953e517936" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.736s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.228191] env[62816]: DEBUG nova.network.neutron [-] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.259422] env[62816]: DEBUG oslo_vmware.api [None req-1810922c-f025-4558-8309-d9dc32e62279 
tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788925, 'name': ReconfigVM_Task, 'duration_secs': 0.388092} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.259906] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1810922c-f025-4558-8309-d9dc32e62279 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Reconfigured VM instance to set the machine id {{(pid=62816) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1766.501760] env[62816]: DEBUG oslo_concurrency.lockutils [req-f0802eeb-7781-464c-9527-40bde65826b0 req-942baf8b-a1f5-4dad-b8cc-f2c11a66b2b7 service nova] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.501996] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.502108] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1766.502251] env[62816]: DEBUG nova.objects.instance [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lazy-loading 'info_cache' on Instance uuid 0dbf907f-0313-435c-a8be-19f7e48ded76 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1766.503542] env[62816]: DEBUG nova.network.neutron [-] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.555274] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b50698de-d53d-4e89-89eb-c25770ee57fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.565600] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4970bef-3826-4fb7-91cf-d3913e867a01 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.599367] env[62816]: DEBUG nova.compute.manager [req-fbd673f6-e5e3-4a43-8266-4ee2e0a19553 req-374cf128-5432-46c8-9e59-5d97f2739980 service nova] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Detach interface failed, port_id=0aebe84d-1c20-4011-90d2-8e7f579b4b29, reason: Instance f9d9593a-1c25-47a1-98fd-4462a851f134 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1766.701041] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "0dbf907f-0313-435c-a8be-19f7e48ded76" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.701353] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.701566] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "0dbf907f-0313-435c-a8be-19f7e48ded76-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.701751] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.701918] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.707128] env[62816]: INFO nova.compute.manager [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Terminating instance [ 1766.712585] env[62816]: DEBUG nova.compute.manager [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1766.712585] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1766.712585] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be874420-5cfa-4112-b6be-d23b7e214e7f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.721352] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1766.721662] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8e86a5a-086f-436a-a495-b57bedf9433b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.728051] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1766.728051] env[62816]: value = "task-1788926" [ 1766.728051] env[62816]: _type = "Task" [ 1766.728051] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.733466] env[62816]: INFO nova.compute.manager [-] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Took 1.30 seconds to deallocate network for instance. [ 1766.736959] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788926, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.806020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.806020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.927152] env[62816]: DEBUG nova.compute.manager [req-5037606f-0cba-451b-b366-32f9241700bd req-0c618b6c-4454-400c-98be-e3768c010aa3 service nova] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Received event network-vif-deleted-75829904-7a65-435d-a888-10e80563e07e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1767.007494] env[62816]: INFO nova.compute.manager [-] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Took 1.52 seconds to deallocate network for instance. [ 1767.081404] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1767.081604] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371147', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'name': 'volume-7d451747-ece2-4770-9e36-d259b810e2df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8105e650-8482-40c6-bd7a-b8daea19a0d5', 'attached_at': '', 'detached_at': '', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'serial': '7d451747-ece2-4770-9e36-d259b810e2df'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1767.082780] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a55fa29-ac10-40a5-960a-ac2032799148 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.106547] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc4607d-5a19-47f1-85b3-464b9bdba9c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.134671] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] volume-7d451747-ece2-4770-9e36-d259b810e2df/volume-7d451747-ece2-4770-9e36-d259b810e2df.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1767.135093] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52f10a59-bc14-42f2-a544-59a7e0e952f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.155235] env[62816]: DEBUG oslo_vmware.api [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1767.155235] env[62816]: value = "task-1788927" [ 1767.155235] env[62816]: _type = "Task" [ 1767.155235] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.162856] env[62816]: DEBUG oslo_vmware.api [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788927, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.239097] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788926, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.241218] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.241398] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.241587] env[62816]: DEBUG nova.objects.instance [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lazy-loading 'resources' on Instance uuid bf0a39dc-634b-4ace-b11c-ebf50ef9b86b {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1767.307536] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1767.515019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.664424] env[62816]: DEBUG oslo_vmware.api [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788927, 'name': ReconfigVM_Task, 'duration_secs': 0.338052} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.665204] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Reconfigured VM instance instance-00000036 to attach disk [datastore1] volume-7d451747-ece2-4770-9e36-d259b810e2df/volume-7d451747-ece2-4770-9e36-d259b810e2df.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1767.669493] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9046afc4-a8fd-4891-a70a-bde16451f80a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.690026] env[62816]: DEBUG oslo_vmware.api [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1767.690026] env[62816]: value = "task-1788928" [ 1767.690026] env[62816]: _type = "Task" [ 1767.690026] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.701596] env[62816]: DEBUG oslo_vmware.api [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788928, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.739266] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788926, 'name': PowerOffVM_Task, 'duration_secs': 0.514074} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.739266] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1767.739266] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1767.739266] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eda6cb9b-8635-4421-889e-460898ba5fbc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.822130] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1767.822130] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1767.822130] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Deleting the datastore file [datastore1] 0dbf907f-0313-435c-a8be-19f7e48ded76 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1767.824057] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d284ef0-471e-4783-8d00-87535d45caf8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.827178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.831737] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for the task: (returnval){ [ 1767.831737] env[62816]: value = "task-1788930" [ 1767.831737] env[62816]: _type = "Task" [ 1767.831737] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.840489] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788930, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.961367] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9cd831-a908-41c0-b361-cf956aca0790 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.968983] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5c51f8-2184-4be6-bcf8-27b2b4199c35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.001173] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6455e6-cfa8-40d7-9884-b24b2865a307 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.008482] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89dff930-3c48-4044-ab00-60bcfd991433 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.021653] env[62816]: DEBUG nova.compute.provider_tree [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.206397] env[62816]: DEBUG oslo_vmware.api [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788928, 'name': ReconfigVM_Task, 'duration_secs': 0.150788} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.206724] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371147', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'name': 'volume-7d451747-ece2-4770-9e36-d259b810e2df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8105e650-8482-40c6-bd7a-b8daea19a0d5', 'attached_at': '', 'detached_at': '', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'serial': '7d451747-ece2-4770-9e36-d259b810e2df'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1768.287647] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [{"id": "fe984819-7451-4e21-be74-349cfccd5318", "address": "fa:16:3e:cf:6b:aa", "network": {"id": "5a4eac83-6690-4cfe-9566-bb19f92bc348", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-771748026-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.161", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7931dc5c9a614764a02086f070df1b00", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c0b05e-6d10-474c-9173-4c8f1dacac9f", "external-id": "nsx-vlan-transportzone-693", "segmentation_id": 693, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe984819-74", "ovs_interfaceid": "fe984819-7451-4e21-be74-349cfccd5318", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.341814] env[62816]: DEBUG oslo_vmware.api [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Task: {'id': task-1788930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.163673} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.342112] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1768.342307] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1768.342487] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1768.342657] env[62816]: INFO nova.compute.manager [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1768.342900] env[62816]: DEBUG oslo.service.loopingcall [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1768.343114] env[62816]: DEBUG nova.compute.manager [-] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1768.343212] env[62816]: DEBUG nova.network.neutron [-] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1768.526683] env[62816]: DEBUG nova.scheduler.client.report [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.532265] env[62816]: INFO nova.compute.manager [None req-bd0db766-00bb-4590-b620-e2dc3c387f3b tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Get console output [ 1768.532505] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-bd0db766-00bb-4590-b620-e2dc3c387f3b tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] The console log is missing. 
Check your VSPC configuration [ 1768.775536] env[62816]: DEBUG nova.compute.manager [req-856fdd27-cfc1-466d-9370-5f615a83ce58 req-fff48506-f04b-477a-80a0-05b3249eb5d0 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Received event network-vif-deleted-fe984819-7451-4e21-be74-349cfccd5318 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1768.775736] env[62816]: INFO nova.compute.manager [req-856fdd27-cfc1-466d-9370-5f615a83ce58 req-fff48506-f04b-477a-80a0-05b3249eb5d0 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Neutron deleted interface fe984819-7451-4e21-be74-349cfccd5318; detaching it from the instance and deleting it from the info cache [ 1768.775905] env[62816]: DEBUG nova.network.neutron [req-856fdd27-cfc1-466d-9370-5f615a83ce58 req-fff48506-f04b-477a-80a0-05b3249eb5d0 service nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.789966] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-0dbf907f-0313-435c-a8be-19f7e48ded76" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.790224] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1768.790539] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.790945] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.791214] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.791531] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.791674] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.791886] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.792080] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1768.792302] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1769.035953] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.038284] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.524s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.038575] env[62816]: DEBUG nova.objects.instance [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'resources' on Instance uuid f9d9593a-1c25-47a1-98fd-4462a851f134 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.058578] env[62816]: INFO nova.scheduler.client.report [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Deleted allocations for instance bf0a39dc-634b-4ace-b11c-ebf50ef9b86b [ 1769.246331] env[62816]: DEBUG nova.objects.instance [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lazy-loading 'flavor' on Instance uuid 8105e650-8482-40c6-bd7a-b8daea19a0d5 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.252620] env[62816]: DEBUG nova.network.neutron [-] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.280224] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-47028231-40dd-411c-a686-6127c7e2fce5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.290286] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb31f8e7-d017-4d94-a35b-9b297606c3cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.301488] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.322717] env[62816]: DEBUG nova.compute.manager [req-856fdd27-cfc1-466d-9370-5f615a83ce58 req-fff48506-f04b-477a-80a0-05b3249eb5d0 service 
nova] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Detach interface failed, port_id=fe984819-7451-4e21-be74-349cfccd5318, reason: Instance 0dbf907f-0313-435c-a8be-19f7e48ded76 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1769.569943] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d4b2d79d-c2e8-45e6-8d08-c0118c25e922 tempest-ServerAddressesTestJSON-93490714 tempest-ServerAddressesTestJSON-93490714-project-member] Lock "bf0a39dc-634b-4ace-b11c-ebf50ef9b86b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.333s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.606067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "a01e772c-dafe-4091-bae6-f9f59d5c972d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.606184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.606394] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "a01e772c-dafe-4091-bae6-f9f59d5c972d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.606577] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.606748] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.611685] env[62816]: INFO nova.compute.manager [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Terminating instance [ 1769.613927] env[62816]: DEBUG nova.compute.manager [None 
req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1769.614147] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1769.614980] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-296cf154-d679-4e9d-9a30-f8ad0b2affdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.624059] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1769.624301] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-02c96606-6c58-4048-8ba1-3f5937f1b70d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.630950] env[62816]: DEBUG oslo_vmware.api [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1769.630950] env[62816]: value = "task-1788931" [ 1769.630950] env[62816]: _type = "Task" [ 1769.630950] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.640624] env[62816]: DEBUG oslo_vmware.api [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788931, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.758372] env[62816]: DEBUG oslo_concurrency.lockutils [None req-19ddedcc-b2a7-453b-a01f-859a1d6a8ce8 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.779s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.758372] env[62816]: INFO nova.compute.manager [-] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Took 1.41 seconds to deallocate network for instance. 
[ 1769.779528] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dcc839e-21da-44cc-adff-dbe3df82dbba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.791140] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0362cf5-f15f-4bec-9afb-ff28d8f1d59a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.838606] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e546148-0a9c-4e02-a17b-fa2aeec84b62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.838606] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-953010bd-2b3d-4609-add2-a2e4715ac380 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.844787] env[62816]: DEBUG nova.compute.provider_tree [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.145816] env[62816]: DEBUG oslo_vmware.api [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788931, 'name': PowerOffVM_Task, 'duration_secs': 0.247923} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.146278] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1770.146493] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1770.146795] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4019dfd5-568d-4005-be1c-c43869aebe0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.265415] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.281823] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1770.282175] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1770.282780] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Deleting the datastore file [datastore1] a01e772c-dafe-4091-bae6-f9f59d5c972d {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1770.282958] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-717895d1-e5d4-4d32-bffd-f7561fb70c76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.289653] env[62816]: DEBUG oslo_vmware.api [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for the task: (returnval){ [ 1770.289653] env[62816]: value = "task-1788933" [ 1770.289653] env[62816]: _type = "Task" [ 1770.289653] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.300609] env[62816]: DEBUG oslo_vmware.api [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.352703] env[62816]: DEBUG nova.scheduler.client.report [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1770.636820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.637188] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.637328] env[62816]: INFO nova.compute.manager [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Shelving [ 1770.686553] env[62816]: DEBUG oslo_concurrency.lockutils [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.686834] env[62816]: DEBUG oslo_concurrency.lockutils [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.799684] env[62816]: DEBUG oslo_vmware.api [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 
tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Task: {'id': task-1788933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.20318} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.799946] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1770.800153] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1770.800332] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1770.800505] env[62816]: INFO nova.compute.manager [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1770.800745] env[62816]: DEBUG oslo.service.loopingcall [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1770.800935] env[62816]: DEBUG nova.compute.manager [-] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1770.801045] env[62816]: DEBUG nova.network.neutron [-] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1770.858227] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1770.860649] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.034s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1770.863320] env[62816]: INFO nova.compute.claims [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1770.888563] env[62816]: INFO nova.scheduler.client.report [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleted allocations for instance f9d9593a-1c25-47a1-98fd-4462a851f134 [ 1771.151738] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1771.152014] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a1615a9-76be-4404-b742-5a86e6723843 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.159875] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1771.159875] env[62816]: value = "task-1788934" [ 1771.159875] env[62816]: _type = "Task" [ 1771.159875] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.168901] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788934, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.190989] env[62816]: INFO nova.compute.manager [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Detaching volume 7d451747-ece2-4770-9e36-d259b810e2df [ 1771.238164] env[62816]: INFO nova.virt.block_device [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Attempting to driver detach volume 7d451747-ece2-4770-9e36-d259b810e2df from mountpoint /dev/sdb [ 1771.238490] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1771.238738] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371147', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'name': 'volume-7d451747-ece2-4770-9e36-d259b810e2df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8105e650-8482-40c6-bd7a-b8daea19a0d5', 'attached_at': '', 'detached_at': '', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'serial': '7d451747-ece2-4770-9e36-d259b810e2df'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1771.239679] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbe0b6f-6a70-4641-8dbc-60835febf96c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.264386] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99657b98-90f4-4b69-88a0-1c271d236fca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.272629] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35295bdc-1f70-419d-a3f3-1d339efda5a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.297229] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c671fe-6918-4f79-a305-e97702c2ea06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.315312] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] The volume has not been displaced from its original location: [datastore1] volume-7d451747-ece2-4770-9e36-d259b810e2df/volume-7d451747-ece2-4770-9e36-d259b810e2df.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1771.320844] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Reconfiguring VM instance instance-00000036 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1771.323699] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7b411b1-dd8d-4121-bfab-bf8f8a2f6f7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.341323] env[62816]: DEBUG nova.compute.manager [req-b6763bef-abb3-48b4-92e2-a9b556ed61bb req-5b5ad84e-c04a-41f3-aab8-9c2c9d164019 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Received event network-vif-deleted-bf038e03-93db-4837-8a8e-6b876acd1b7c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1771.341570] env[62816]: INFO nova.compute.manager [req-b6763bef-abb3-48b4-92e2-a9b556ed61bb req-5b5ad84e-c04a-41f3-aab8-9c2c9d164019 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Neutron deleted interface bf038e03-93db-4837-8a8e-6b876acd1b7c; detaching it from the instance and deleting it from the info cache [ 1771.341762] env[62816]: DEBUG nova.network.neutron [req-b6763bef-abb3-48b4-92e2-a9b556ed61bb req-5b5ad84e-c04a-41f3-aab8-9c2c9d164019 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.347933] env[62816]: DEBUG oslo_vmware.api [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1771.347933] env[62816]: value = "task-1788935" [ 1771.347933] env[62816]: _type = "Task" [ 1771.347933] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.360143] env[62816]: DEBUG oslo_vmware.api [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788935, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.401166] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ef6fb279-2ef7-4612-9954-f7de72e9b24e tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "f9d9593a-1c25-47a1-98fd-4462a851f134" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.054s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.570868] env[62816]: DEBUG nova.compute.manager [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1771.571824] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f306b616-d0f5-4fbf-9d02-055fe45a6ef5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.670204] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788934, 'name': PowerOffVM_Task, 'duration_secs': 0.20223} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.670918] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1771.671745] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59ea889d-77e2-4550-9d8e-24edf389ec63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.690065] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec7c6265-5fa6-4c03-947c-2dd1f79a633a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.840359] env[62816]: DEBUG nova.network.neutron [-] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1771.844436] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88d277f5-59e1-481e-a2dc-ea436126bb80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.860578] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db08b1c2-57e5-49da-a82a-bc31bdd27844 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.876638] env[62816]: DEBUG oslo_vmware.api [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788935, 'name': ReconfigVM_Task, 
'duration_secs': 0.214052} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.877413] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Reconfigured VM instance instance-00000036 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1771.882463] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7d7f572-da56-4bfc-a6e1-eddf65cba95b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.904652] env[62816]: DEBUG nova.compute.manager [req-b6763bef-abb3-48b4-92e2-a9b556ed61bb req-5b5ad84e-c04a-41f3-aab8-9c2c9d164019 service nova] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Detach interface failed, port_id=bf038e03-93db-4837-8a8e-6b876acd1b7c, reason: Instance a01e772c-dafe-4091-bae6-f9f59d5c972d could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1771.913431] env[62816]: DEBUG oslo_vmware.api [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1771.913431] env[62816]: value = "task-1788936" [ 1771.913431] env[62816]: _type = "Task" [ 1771.913431] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.922346] env[62816]: DEBUG oslo_vmware.api [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788936, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.081973] env[62816]: INFO nova.compute.manager [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] instance snapshotting [ 1772.082589] env[62816]: DEBUG nova.objects.instance [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'flavor' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.084390] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf864b1-cebb-4bd4-a45a-80ecf01480d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.092610] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2709fc35-c29d-43c4-a0bb-a5c834baeee7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.122534] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a855e19c-e0b6-4210-9b7c-b3d47b9bff2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.130049] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bed90e-8287-4d63-bf01-320ffd7715ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.145579] env[62816]: DEBUG nova.compute.provider_tree [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1772.201040] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1772.201455] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-93857ae6-6cdc-436c-bdb8-b9f835bfa499 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.209376] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1772.209376] env[62816]: value = "task-1788937" [ 1772.209376] env[62816]: _type = "Task" [ 1772.209376] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.217461] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788937, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.343306] env[62816]: INFO nova.compute.manager [-] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Took 1.54 seconds to deallocate network for instance. [ 1772.424713] env[62816]: DEBUG oslo_vmware.api [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788936, 'name': ReconfigVM_Task, 'duration_secs': 0.138277} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.425058] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371147', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'name': 'volume-7d451747-ece2-4770-9e36-d259b810e2df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '8105e650-8482-40c6-bd7a-b8daea19a0d5', 'attached_at': '', 'detached_at': '', 'volume_id': '7d451747-ece2-4770-9e36-d259b810e2df', 'serial': '7d451747-ece2-4770-9e36-d259b810e2df'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1772.595087] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5554d80-1b67-484b-8d39-baaf64620b40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.615162] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c3f8e9-dc7d-4320-a8ca-8611aef2a44d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.649344] env[62816]: DEBUG nova.scheduler.client.report [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1772.719299] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788937, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.850632] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.970384] env[62816]: DEBUG nova.objects.instance [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lazy-loading 'flavor' on Instance uuid 8105e650-8482-40c6-bd7a-b8daea19a0d5 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1773.125935] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1773.126312] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d9b92810-a74c-47e1-b588-c79b4e570cf7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.134488] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1773.134488] env[62816]: value = "task-1788938" [ 1773.134488] env[62816]: _type = "Task" [ 1773.134488] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.142316] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788938, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.154497] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.155581] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1773.159740] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.858s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.162987] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.162987] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1773.162987] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.895s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.162987] env[62816]: DEBUG nova.objects.instance [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lazy-loading 'resources' on Instance uuid 0dbf907f-0313-435c-a8be-19f7e48ded76 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1773.162987] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc66f75-60f9-4cdc-8587-aada717b5094 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.172420] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c49459c-fa50-4bd0-b25f-9589d1730973 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.190351] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2d5ff8-5058-4252-b202-207bbbbc7241 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.197724] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6898790-2f5f-4e46-8494-cbce44f99d8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.232405] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178389MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1773.232629] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.241498] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788937, 'name': CreateSnapshot_Task, 'duration_secs': 0.652321} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.241819] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1773.242555] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d738eb5-4a29-4ea0-9179-ad91a6744ab6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.592398] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.592583] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.645677] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788938, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.668456] env[62816]: DEBUG nova.compute.utils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1773.673565] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1773.673565] env[62816]: DEBUG nova.network.neutron [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1773.745731] env[62816]: DEBUG nova.policy [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0a2129bc83a45d695730796b55f1caf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72d49b085afa4df99700ea4e15e9c87e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1773.760215] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1773.763157] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a207f673-ba4c-4a8a-bd1f-b8556215f6f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.772738] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1773.772738] env[62816]: value = "task-1788939" [ 1773.772738] env[62816]: _type = "Task" [ 1773.772738] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.781837] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788939, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.888390] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00563dbb-cd50-4b28-b181-926e7d78be84 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.895782] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17acd4d8-0d08-47ba-9644-140fa5e3dddb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.926911] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df6eff8-e9dc-4c35-8f5a-a42e41b1560e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.934347] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53fe7dca-ea5f-4a5d-b62b-22db20721727 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.951223] env[62816]: DEBUG nova.compute.provider_tree [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.978132] env[62816]: DEBUG oslo_concurrency.lockutils [None req-88bb1e18-5736-4b38-b4f5-ae174d6d3ad5 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.291s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.095447] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1774.145672] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788938, 'name': CreateSnapshot_Task, 'duration_secs': 0.619375} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.145963] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1774.147082] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd1938a-163b-4813-9e3a-86896efade93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.173218] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1774.240156] env[62816]: DEBUG nova.network.neutron [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Successfully created port: e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1774.284180] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788939, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.457244] env[62816]: DEBUG nova.scheduler.client.report [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1774.619302] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.667222] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1774.667432] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e16b075f-b6cd-4a59-ab45-c6e1c430eb42 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.675971] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1774.675971] env[62816]: value = "task-1788940" [ 1774.675971] env[62816]: _type = "Task" [ 1774.675971] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.687367] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788940, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.785024] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788939, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.965265] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.966563] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.116s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.967047] env[62816]: DEBUG nova.objects.instance [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lazy-loading 'resources' on Instance uuid a01e772c-dafe-4091-bae6-f9f59d5c972d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1774.994179] env[62816]: INFO nova.scheduler.client.report [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Deleted allocations for instance 0dbf907f-0313-435c-a8be-19f7e48ded76 [ 1775.187927] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1775.189784] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788940, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.210053] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1775.211140] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1775.211140] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1775.211140] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1775.211140] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1775.211537] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1775.211537] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1775.211630] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1775.211805] env[62816]: DEBUG 
nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1775.211992] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1775.212264] env[62816]: DEBUG nova.virt.hardware [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1775.213127] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e2af06-c8eb-4dd7-9e30-8c6ddcf4f576 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.222009] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e100e48b-1b94-4b15-a9ed-6589219ba4d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.283373] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788939, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.502623] env[62816]: DEBUG oslo_concurrency.lockutils [None req-492d203d-40a0-4c1a-9afb-b858c12b48c1 tempest-AttachInterfacesUnderV243Test-1256733884 tempest-AttachInterfacesUnderV243Test-1256733884-project-member] Lock "0dbf907f-0313-435c-a8be-19f7e48ded76" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.801s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.533000] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "913bba01-e64b-4b52-af94-5effcefc2677" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.533266] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "913bba01-e64b-4b52-af94-5effcefc2677" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.681923] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7abe6d6-25cb-4f7d-973e-aa14ffb803f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.691562] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788940, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.694397] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68de0683-fc8c-402e-97fc-46717687dd49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.701060] env[62816]: DEBUG nova.compute.manager [req-d6fd7b74-0d4a-4ede-9556-dc10599407ec req-76503e42-f447-443a-b719-261d781131d5 service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Received event network-vif-plugged-e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.701685] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6fd7b74-0d4a-4ede-9556-dc10599407ec req-76503e42-f447-443a-b719-261d781131d5 service nova] Acquiring lock "a50b78c5-bb7e-4038-9a74-ecde2042828f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.701685] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6fd7b74-0d4a-4ede-9556-dc10599407ec req-76503e42-f447-443a-b719-261d781131d5 service nova] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.701685] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6fd7b74-0d4a-4ede-9556-dc10599407ec req-76503e42-f447-443a-b719-261d781131d5 service nova] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.701961] env[62816]: DEBUG nova.compute.manager [req-d6fd7b74-0d4a-4ede-9556-dc10599407ec req-76503e42-f447-443a-b719-261d781131d5 service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] No waiting events found dispatching network-vif-plugged-e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1775.701961] env[62816]: WARNING nova.compute.manager [req-d6fd7b74-0d4a-4ede-9556-dc10599407ec req-76503e42-f447-443a-b719-261d781131d5 service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Received unexpected event network-vif-plugged-e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 for instance with vm_state building and task_state spawning. 
[ 1775.731526] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a196713d-fb8a-4ff6-845f-29081bf52d70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.741170] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8166b8c5-4c3a-41a5-9367-8ac7b82e96f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.758564] env[62816]: DEBUG nova.compute.provider_tree [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1775.784952] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788939, 'name': CloneVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.799062] env[62816]: DEBUG nova.network.neutron [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Successfully updated port: e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1776.035731] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1776.188664] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788940, 'name': CloneVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.258917] env[62816]: DEBUG nova.scheduler.client.report [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1776.285715] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788939, 'name': CloneVM_Task, 'duration_secs': 2.400012} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.285989] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Created linked-clone VM from snapshot [ 1776.286723] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf369b9d-62ce-48b4-96a2-07c5d3d6a9e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.296266] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Uploading image 74b5ba28-84d4-460e-9f4d-6cb94c84b4ea {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1776.301580] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-a50b78c5-bb7e-4038-9a74-ecde2042828f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.301722] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-a50b78c5-bb7e-4038-9a74-ecde2042828f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.301863] env[62816]: DEBUG nova.network.neutron [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1776.327947] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1776.327947] env[62816]: value = "vm-371150" [ 1776.327947] env[62816]: _type = "VirtualMachine" [ 1776.327947] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1776.328485] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4671d741-1353-4690-b9d4-06a52e1be489 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.336307] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease: (returnval){ [ 1776.336307] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207c568-946c-7360-6714-207c5cbe113e" [ 1776.336307] env[62816]: _type = "HttpNfcLease" [ 1776.336307] env[62816]: } obtained for exporting VM: (result){ [ 1776.336307] env[62816]: value = "vm-371150" [ 1776.336307] env[62816]: _type = "VirtualMachine" [ 1776.336307] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1776.336694] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the lease: (returnval){ [ 1776.336694] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207c568-946c-7360-6714-207c5cbe113e" [ 1776.336694] env[62816]: _type = "HttpNfcLease" [ 1776.336694] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1776.342976] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1776.342976] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207c568-946c-7360-6714-207c5cbe113e" [ 1776.342976] env[62816]: _type = "HttpNfcLease" [ 1776.342976] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1776.564965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.689319] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788940, 'name': CloneVM_Task, 'duration_secs': 1.580808} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.689596] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created linked-clone VM from snapshot [ 1776.690422] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b159659-5252-4d0f-942b-d6ee83122393 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.697482] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Uploading image 141b03bd-f52b-4815-b348-7822ad15c428 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1776.720486] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1776.720486] env[62816]: value = "vm-371151" [ 1776.720486] env[62816]: _type = "VirtualMachine" [ 1776.720486] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1776.720693] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-65dfb1ba-8e92-45b6-8032-ff19275af2a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.727794] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease: (returnval){ [ 1776.727794] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52645f54-d1f9-d31b-6d7a-e818d9ebe7a4" [ 1776.727794] env[62816]: _type = "HttpNfcLease" [ 1776.727794] env[62816]: } obtained for exporting VM: (result){ [ 1776.727794] env[62816]: value = "vm-371151" [ 1776.727794] env[62816]: _type = "VirtualMachine" [ 1776.727794] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1776.728112] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the lease: (returnval){ [ 1776.728112] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52645f54-d1f9-d31b-6d7a-e818d9ebe7a4" [ 1776.728112] env[62816]: _type = "HttpNfcLease" [ 1776.728112] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1776.734435] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1776.734435] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52645f54-d1f9-d31b-6d7a-e818d9ebe7a4" [ 1776.734435] env[62816]: _type = "HttpNfcLease" [ 1776.734435] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1776.764439] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.798s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.766844] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 3.534s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.790729] env[62816]: INFO nova.scheduler.client.report [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Deleted allocations for instance a01e772c-dafe-4091-bae6-f9f59d5c972d [ 1776.832496] env[62816]: DEBUG nova.network.neutron [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1776.844476] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1776.844476] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207c568-946c-7360-6714-207c5cbe113e" [ 1776.844476] env[62816]: _type = "HttpNfcLease" [ 1776.844476] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1776.845438] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1776.845438] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207c568-946c-7360-6714-207c5cbe113e" [ 1776.845438] env[62816]: _type = "HttpNfcLease" [ 1776.845438] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1776.845891] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d81bedc-e1f4-44c9-be1c-593c24277272 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.853247] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e2531f-29ff-52d6-3842-5e2b09dd7a6d/disk-0.vmdk from lease info. 
{{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1776.853371] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e2531f-29ff-52d6-3842-5e2b09dd7a6d/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1776.959632] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d9122b05-8524-4774-9164-5edcc78e2093 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.056227] env[62816]: DEBUG nova.network.neutron [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Updating instance_info_cache with network_info: [{"id": "e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0", "address": "fa:16:3e:25:50:c5", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11d46ce-b6", "ovs_interfaceid": "e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.238596] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1777.238596] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52645f54-d1f9-d31b-6d7a-e818d9ebe7a4" [ 1777.238596] env[62816]: _type = "HttpNfcLease" [ 1777.238596] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1777.238596] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1777.238596] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52645f54-d1f9-d31b-6d7a-e818d9ebe7a4" [ 1777.238596] env[62816]: _type = "HttpNfcLease" [ 1777.238596] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1777.239974] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2183ef6-df9b-4752-a30e-90b597445878 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.247983] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281b3ff-1866-3745-e20f-f3cd599c7bf2/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1777.247983] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281b3ff-1866-3745-e20f-f3cd599c7bf2/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1777.322021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a4e548b7-44b3-40ee-b764-35df53178416 tempest-ServersWithSpecificFlavorTestJSON-111190641 tempest-ServersWithSpecificFlavorTestJSON-111190641-project-member] Lock "a01e772c-dafe-4091-bae6-f9f59d5c972d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.714s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.365185] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b985fb55-0e5a-49b5-b0cf-9bb763c0a757 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.565546] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-a50b78c5-bb7e-4038-9a74-ecde2042828f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.567731] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance network_info: |[{"id": "e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0", "address": "fa:16:3e:25:50:c5", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": 
"nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11d46ce-b6", "ovs_interfaceid": "e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1777.569669] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:50:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1777.578145] env[62816]: DEBUG oslo.service.loopingcall [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.578450] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1777.578797] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c9e5f007-0186-4e4a-93fb-4b1bfa2f3be5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.600884] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1777.600884] env[62816]: value = "task-1788943" [ 1777.600884] env[62816]: _type = "Task" [ 1777.600884] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.610087] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788943, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.735274] env[62816]: DEBUG nova.compute.manager [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Received event network-changed-e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1777.738252] env[62816]: DEBUG nova.compute.manager [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Refreshing instance network info cache due to event network-changed-e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1777.740284] env[62816]: DEBUG oslo_concurrency.lockutils [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] Acquiring lock "refresh_cache-a50b78c5-bb7e-4038-9a74-ecde2042828f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.740914] env[62816]: DEBUG oslo_concurrency.lockutils [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] Acquired lock "refresh_cache-a50b78c5-bb7e-4038-9a74-ecde2042828f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.741573] env[62816]: DEBUG nova.network.neutron [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Refreshing network info cache for port e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 31ac8296-14fa-46f7-b825-c31904b832d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 8105e650-8482-40c6-bd7a-b8daea19a0d5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9745413b-2bd8-45d7-8491-483e4921b59c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance dd833e38-691c-4757-9c6b-659c74343d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c66fa160-d4dd-429f-8751-f36cb2020ff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 543d69d2-0694-4d57-bbae-f8851ff0f0dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance d03ed540-5c20-4bcb-ac7e-eec8c09e4451 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4ab07a21-2685-42bc-af13-b95473993d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b9e8af08-9579-4dbf-8ea1-35ffab75e159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.850823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9972b167-a950-4dba-ac02-068f66300053 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.851563] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f97ea34e-792e-4023-bd2f-549dba129925 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1777.851563] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance a50b78c5-bb7e-4038-9a74-ecde2042828f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.111492] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788943, 'name': CreateVM_Task, 'duration_secs': 0.3916} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.111989] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1778.112892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.113301] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.114313] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1778.115363] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e3177dd-3b4e-43c9-8b7a-e165114877c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.123038] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1778.123038] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d5204a-5bf6-ccab-821b-6482aad428f6" [ 1778.123038] env[62816]: _type = "Task" [ 1778.123038] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.130828] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d5204a-5bf6-ccab-821b-6482aad428f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.355740] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance fa719ff5-0219-485f-aac7-2cde4bbef8f6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1778.633667] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d5204a-5bf6-ccab-821b-6482aad428f6, 'name': SearchDatastore_Task, 'duration_secs': 0.014218} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.634163] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.634473] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1778.634714] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.634864] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.635065] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1778.635339] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d469fcb2-fddb-4edb-9dbb-9056e9d64d8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.638429] env[62816]: DEBUG nova.network.neutron [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Updated VIF entry in instance network info cache for port e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1778.639753] env[62816]: DEBUG nova.network.neutron [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Updating instance_info_cache with network_info: [{"id": "e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0", "address": "fa:16:3e:25:50:c5", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape11d46ce-b6", "ovs_interfaceid": "e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.648261] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1778.648463] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1778.651290] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c54b2a02-bced-4c6a-8d0e-96dc90d0aa39 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.657725] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1778.657725] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5243d0eb-2357-9259-3005-7384127b3ce1" [ 1778.657725] env[62816]: _type = "Task" [ 1778.657725] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.667400] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5243d0eb-2357-9259-3005-7384127b3ce1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.862167] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 913bba01-e64b-4b52-af94-5effcefc2677 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1778.862167] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1778.862167] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1779.085514] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b1f325-8263-435d-b31a-c3c25dd592df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.094571] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9363a1f3-427e-4f28-8a62-50647bba9208 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.136906] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3f6a11-e53e-4149-961f-856b9eb46957 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.145340] env[62816]: DEBUG oslo_concurrency.lockutils [req-a45855af-99bf-46d2-9f33-69c6dc6c3167 req-a684ad0d-ef8c-41c9-8eb9-5190f3431a2a service nova] Releasing lock "refresh_cache-a50b78c5-bb7e-4038-9a74-ecde2042828f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.147257] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d42cc75-bb15-4c68-a9dc-b202182767e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.163485] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.185422] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5243d0eb-2357-9259-3005-7384127b3ce1, 'name': SearchDatastore_Task, 'duration_secs': 0.013896} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.185422] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97ae87cd-94be-468a-8378-912c89e2ac3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.191324] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1779.191324] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521ec34e-6be5-3964-0853-86d86bed9fe5" [ 1779.191324] env[62816]: _type = "Task" [ 1779.191324] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.201160] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521ec34e-6be5-3964-0853-86d86bed9fe5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.303915] env[62816]: INFO nova.compute.manager [None req-9e3d62c1-d0bd-4c36-b70f-afb300b7e4c0 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Get console output [ 1779.304194] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-9e3d62c1-d0bd-4c36-b70f-afb300b7e4c0 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] The console log is missing. Check your VSPC configuration [ 1779.670030] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1779.712118] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521ec34e-6be5-3964-0853-86d86bed9fe5, 'name': SearchDatastore_Task, 'duration_secs': 0.014415} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.712414] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.712678] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1779.712957] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15a4fbdd-80a4-4c4b-9af4-14d9cdc2fbb1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.721415] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1779.721415] env[62816]: value = "task-1788944" [ 1779.721415] env[62816]: _type = "Task" [ 1779.721415] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.732846] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788944, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.177929] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1780.177929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.409s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.177929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.557s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.177929] env[62816]: INFO nova.compute.claims [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1780.181209] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1780.181569] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1780.236800] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788944, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.385805] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "31ac8296-14fa-46f7-b825-c31904b832d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.386139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "31ac8296-14fa-46f7-b825-c31904b832d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.386381] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "31ac8296-14fa-46f7-b825-c31904b832d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.386591] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "31ac8296-14fa-46f7-b825-c31904b832d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.386778] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "31ac8296-14fa-46f7-b825-c31904b832d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.389129] env[62816]: INFO nova.compute.manager [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Terminating instance [ 1780.391282] env[62816]: DEBUG nova.compute.manager [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1780.391504] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1780.392409] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18b4224a-cd99-4350-ae1f-dadd69a9047f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.403087] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1780.403686] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2eacbdac-2916-4fcc-b172-7b692692f7dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.409657] env[62816]: DEBUG oslo_vmware.api [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1780.409657] env[62816]: value = "task-1788945" [ 1780.409657] env[62816]: _type = "Task" [ 1780.409657] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.420177] env[62816]: DEBUG oslo_vmware.api [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.703458] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] There are 57 instances to clean {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1780.703672] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: bf0a39dc-634b-4ace-b11c-ebf50ef9b86b] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1780.733709] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788944, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644635} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.734025] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1780.734249] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1780.734539] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fae9826-0710-4109-b353-a6e72c6818ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.742846] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1780.742846] env[62816]: value = "task-1788946" [ 1780.742846] env[62816]: _type = "Task" [ 1780.742846] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.751278] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.919962] env[62816]: DEBUG oslo_vmware.api [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788945, 'name': PowerOffVM_Task, 'duration_secs': 0.208895} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.920372] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1780.920495] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1780.920757] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e511a38-ddfd-4558-b2d7-b88b41c2e37c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.994535] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1780.994535] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1780.994535] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Deleting the datastore file [datastore1] 31ac8296-14fa-46f7-b825-c31904b832d5 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1780.994535] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0e8b058b-cf9b-40ed-ae0a-6cd8d965019d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.002721] env[62816]: DEBUG oslo_vmware.api [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for the task: (returnval){ [ 1781.002721] env[62816]: value = "task-1788948" [ 1781.002721] env[62816]: _type = "Task" [ 1781.002721] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.011913] env[62816]: DEBUG oslo_vmware.api [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.207732] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 56bea284-2871-447b-9bb0-2f57c3053dc0] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1781.263138] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070511} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.263449] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1781.264441] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed43399-0819-43ea-9ef9-b8f380972723 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.290734] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1781.293848] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-63c3e282-17b3-493e-9801-b1c3970e0533 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.317888] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1781.317888] env[62816]: value = "task-1788949" [ 1781.317888] env[62816]: _type = "Task" [ 1781.317888] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.331080] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788949, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.436778] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14fe2f69-83eb-4915-89ea-f2f2719111aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.445607] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6686fc46-cee8-43a8-8d01-86fda3f28a0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.480837] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de5e8f2-f796-45a3-9976-226c5232601d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.489847] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e84c545-ae2a-42f6-8474-4dacdb05b37b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.503928] env[62816]: DEBUG nova.compute.provider_tree [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1781.513465] env[62816]: DEBUG oslo_vmware.api [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Task: {'id': task-1788948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325012} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.514370] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1781.514568] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1781.514749] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1781.514923] env[62816]: INFO nova.compute.manager [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Took 1.12 seconds to destroy the instance on the hypervisor. 
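The entries above repeat the same pattern around every vCenter operation (CreateVM_Task, SearchDatastore_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, ...): the driver receives a task reference, logs "Waiting for the task ... to complete", polls its progress, and proceeds only once the poll reports success. A minimal sketch of that poll-and-wait loop, written against a hypothetical get_task_info() callable rather than the real oslo.vmware session API:

    import time

    POLL_INTERVAL = 0.5  # seconds between progress checks; illustrative, not the driver's setting

    def wait_for_task(get_task_info, task_ref, timeout=300):
        """Block until a vCenter task finishes, mirroring the poll loop seen in the log.

        get_task_info is a hypothetical callable returning an object with
        .state ('running', 'success' or 'error') and .progress (0-100);
        the real driver performs these checks through its vCenter session.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info                      # e.g. "CreateVM_Task ... completed successfully"
            if info.state == "error":
                raise RuntimeError(f"task {task_ref} failed")
            # corresponds to the recurring "progress is N%" debug lines while waiting
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")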
[ 1781.515190] env[62816]: DEBUG oslo.service.loopingcall [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1781.515389] env[62816]: DEBUG nova.compute.manager [-] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1781.515487] env[62816]: DEBUG nova.network.neutron [-] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1781.712035] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: d219e1a0-ca18-4315-9178-57953e517936] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1781.829477] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788949, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.010045] env[62816]: DEBUG nova.scheduler.client.report [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1782.172209] env[62816]: DEBUG nova.compute.manager [req-96436dc8-873b-4a12-ae97-e8d5903e582d req-44b95076-b2cb-43ec-ac88-89f76caa9d89 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Received event network-vif-deleted-cb0d8306-4954-4597-b857-f3410e8e30d8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1782.172476] env[62816]: INFO nova.compute.manager [req-96436dc8-873b-4a12-ae97-e8d5903e582d req-44b95076-b2cb-43ec-ac88-89f76caa9d89 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Neutron deleted interface cb0d8306-4954-4597-b857-f3410e8e30d8; detaching it from the instance and deleting it from the info cache [ 1782.172832] env[62816]: DEBUG nova.network.neutron [req-96436dc8-873b-4a12-ae97-e8d5903e582d req-44b95076-b2cb-43ec-ac88-89f76caa9d89 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.214903] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 74c15238-221c-4d1c-8577-4046d5666e45] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11242}} [ 1782.329294] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788949, 'name': ReconfigVM_Task, 'duration_secs': 0.626861} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.329638] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Reconfigured VM instance instance-00000057 to attach disk [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1782.330314] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-19df1877-cd52-4ce2-a586-c258f5592c11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.338122] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1782.338122] env[62816]: value = "task-1788950" [ 1782.338122] env[62816]: _type = "Task" [ 1782.338122] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.346514] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788950, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.515336] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1782.515921] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1782.518841] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.954s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1782.520367] env[62816]: INFO nova.compute.claims [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1782.645837] env[62816]: DEBUG nova.network.neutron [-] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.675959] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3c50ec6-933a-41c4-9de6-5fa73ae5ff5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.686976] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f9a2c3-4988-49c7-ab61-a30956efd295 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.720843] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0acc334c-e400-4b28-8ee7-8d6cafb057e9] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1782.726020] env[62816]: DEBUG nova.compute.manager [req-96436dc8-873b-4a12-ae97-e8d5903e582d req-44b95076-b2cb-43ec-ac88-89f76caa9d89 service nova] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Detach interface failed, port_id=cb0d8306-4954-4597-b857-f3410e8e30d8, reason: Instance 31ac8296-14fa-46f7-b825-c31904b832d5 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1782.849390] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788950, 'name': Rename_Task, 'duration_secs': 0.151262} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.849718] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1782.849992] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-690d15e1-0b57-4b3f-91d3-7e7f8b2af894 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.857582] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1782.857582] env[62816]: value = "task-1788951" [ 1782.857582] env[62816]: _type = "Task" [ 1782.857582] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.865640] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788951, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.024555] env[62816]: DEBUG nova.compute.utils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1783.029139] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1783.029497] env[62816]: DEBUG nova.network.neutron [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1783.086543] env[62816]: DEBUG nova.policy [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ffca35ab8614990be3ff2c9697d424f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef0dee852154407fa3201a860c55bf3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1783.149182] env[62816]: INFO nova.compute.manager [-] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Took 1.63 seconds to deallocate network for instance. 
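The scheduler report entries above carry the full inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Placement treats (total - reserved) * allocation_ratio as the schedulable capacity of each resource class, so the logged figures work out as in the small worked example below (plain arithmetic over the logged values, not code from the service):

    # Inventory as logged for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inv):
        # schedulable capacity = (total - reserved) * allocation_ratio
        return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- which is why the tracker can
    # report "Total usable vcpus: 48, total allocated vcpus: 12" with room to spare.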
[ 1783.227320] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 915127f6-2da7-4eab-a7cb-331a41d04d0e] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1783.371450] env[62816]: DEBUG oslo_vmware.api [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788951, 'name': PowerOnVM_Task, 'duration_secs': 0.489509} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.371836] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1783.372137] env[62816]: INFO nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Took 8.18 seconds to spawn the instance on the hypervisor. [ 1783.372411] env[62816]: DEBUG nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1783.373569] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0291949-0486-4880-8073-36a76c1f6a62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.458089] env[62816]: DEBUG nova.network.neutron [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Successfully created port: 2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1783.529469] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1783.660832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.736920] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 2583e2ba-8904-420c-a417-d6af71bfa9ac] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1783.749620] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b24f41-3c12-485a-af94-536e069a5366 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.757755] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19342378-d3b9-4c49-a1b6-e3e2eca0775e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.790520] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1b8ed6-510d-466a-b21e-648d82fb7d4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.798867] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d853835-9c39-4f36-8ef5-ec5ff016cc24 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.813583] env[62816]: DEBUG nova.compute.provider_tree [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1783.894052] env[62816]: INFO nova.compute.manager [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Took 16.08 seconds to build instance. 
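Read end to end, the entries from 1777.578 through 1783.894 trace one complete build of instance a50b78c5-bb7e-4038-9a74-ecde2042828f. The recap below lists the vCenter tasks in the order the log shows them, with the durations each poll reported on completion (a reference summary assembled from the log, not a structure used by the driver):

    # vCenter tasks observed for instance a50b78c5-..., in log order, with logged durations (s).
    spawn_steps = [
        ("CreateVM_Task",          0.3916),    # create the VM on the ESX host
        ("SearchDatastore_Task",   0.014218),  # image-cache lookups on datastore1 ...
        ("SearchDatastore_Task",   0.013896),  # ... around devstack-image-cache_base ...
        ("SearchDatastore_Task",   0.014415),  # ... before the disk copy
        ("CopyVirtualDisk_Task",   0.644635),  # copy the cached vmdk into the instance folder
        ("ExtendVirtualDisk_Task", 0.070511),  # extend the root virtual disk
        ("ReconfigVM_Task",        0.626861),  # attach the copied disk to the VM
        ("Rename_Task",            0.151262),  # rename the VM
        ("PowerOnVM_Task",         0.489509),  # power it on
    ]

    task_time = sum(duration for _, duration in spawn_steps)
    print(f"{task_time:.2f}s spent in vCenter tasks out of the 8.18s reported spawn time")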
[ 1784.240008] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9ab4e631-5b31-4b37-9b49-4f0423286752] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1784.317207] env[62816]: DEBUG nova.scheduler.client.report [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1784.395476] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4be3b1d0-4b58-4a00-b7c8-e5be07d5ffee tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.591s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.544437] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1784.744714] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 679ce8d3-a57c-4620-81bc-ee8deea4bc8e] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1784.822700] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.304s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.823265] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1784.825847] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.165s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.826106] env[62816]: DEBUG nova.objects.instance [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lazy-loading 'resources' on Instance uuid 31ac8296-14fa-46f7-b825-c31904b832d5 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1784.920152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.920450] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.052663] env[62816]: DEBUG nova.network.neutron [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Successfully updated port: 2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1785.238854] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.239200] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.239531] env[62816]: DEBUG nova.compute.manager [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1785.240837] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-496d05f2-d91b-4778-b14e-8a1f1be9c798 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.245700] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f09d7ee0-72ee-4aa7-aa7f-deb6dd16be72] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1785.253065] env[62816]: DEBUG nova.compute.manager [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1785.253892] env[62816]: DEBUG nova.objects.instance [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'flavor' on Instance uuid a50b78c5-bb7e-4038-9a74-ecde2042828f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.328837] env[62816]: DEBUG nova.compute.utils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1785.330696] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1785.330919] env[62816]: DEBUG nova.network.neutron [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1785.378376] env[62816]: DEBUG nova.policy [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f59998717d8246c5b238194d0d8f5cf4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b1b175f09c47457ead5fff6d3ecf1cee', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1785.426690] env[62816]: DEBUG nova.compute.utils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1785.543230] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64cf50c-7f6c-4425-83e7-c04ced11426c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.551276] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7992e153-14f5-4917-96ed-85b36d435fe3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.555128] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1785.555435] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.555589] env[62816]: DEBUG nova.network.neutron [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1785.589034] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f708d6e3-4ba0-45d4-80f2-89b297badbc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.598085] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20bf83b4-f75b-4641-8d02-cd4f467074c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.613976] env[62816]: DEBUG nova.compute.provider_tree [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1785.673977] env[62816]: DEBUG nova.network.neutron [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Successfully created port: 76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1785.749830] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3fbcf39f-3b4e-48d1-bfa4-b0fa6448cabb] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1785.759348] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1785.759679] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf8e47b5-5dd1-4d2b-ab9c-fd03db77da1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.767218] env[62816]: DEBUG oslo_vmware.api [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1785.767218] env[62816]: value = "task-1788952" [ 1785.767218] env[62816]: _type = "Task" [ 1785.767218] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.777411] env[62816]: DEBUG oslo_vmware.api [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788952, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.837160] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1785.930030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.086706] env[62816]: DEBUG nova.network.neutron [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1786.096807] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1786.096807] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1786.096807] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.097153] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1786.097472] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.097783] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1786.098150] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1786.098457] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1786.100031] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1786.100031] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1786.100031] env[62816]: DEBUG nova.virt.hardware [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1786.101357] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0230ea22-38d2-4550-af78-74ca5dc356ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.113330] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e2531f-29ff-52d6-3842-5e2b09dd7a6d/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1786.114689] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec10e89b-b404-409c-a8e0-1b3b14c53b6c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.120161] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd2f042-82df-434d-9544-d42ea396c922 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.123437] env[62816]: DEBUG nova.scheduler.client.report [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1786.137992] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e2531f-29ff-52d6-3842-5e2b09dd7a6d/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1786.138180] env[62816]: ERROR oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e2531f-29ff-52d6-3842-5e2b09dd7a6d/disk-0.vmdk due to incomplete transfer. [ 1786.139012] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4f08bd6a-f3ba-4419-9493-a9be50ce05f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.147864] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e2531f-29ff-52d6-3842-5e2b09dd7a6d/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1786.148037] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Uploaded image 74b5ba28-84d4-460e-9f4d-6cb94c84b4ea to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1786.150393] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1786.150610] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7c1f5f07-b6f3-4765-bbea-a78aab869cbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.156749] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1786.156749] env[62816]: value = "task-1788953" [ 1786.156749] env[62816]: _type = "Task" [ 1786.156749] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.164913] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788953, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.253451] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: a5f50ca4-4648-4f33-a6d3-18cfc4fd3441] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1786.280636] env[62816]: DEBUG oslo_vmware.api [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788952, 'name': PowerOffVM_Task, 'duration_secs': 0.225575} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.281068] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1786.281276] env[62816]: DEBUG nova.compute.manager [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1786.282377] env[62816]: DEBUG nova.network.neutron [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updating instance_info_cache with network_info: [{"id": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "address": "fa:16:3e:33:41:2b", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d41a0d3-8e", "ovs_interfaceid": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.284651] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1340a9-cf3f-4f80-8217-7dcb1da1a019 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.391573] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281b3ff-1866-3745-e20f-f3cd599c7bf2/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1786.392828] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5061cb81-11f9-4ee9-b185-dc2c8b43f09b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.399283] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281b3ff-1866-3745-e20f-f3cd599c7bf2/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1786.399478] env[62816]: ERROR oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281b3ff-1866-3745-e20f-f3cd599c7bf2/disk-0.vmdk due to incomplete transfer. [ 1786.399776] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ea541c5c-85f5-476a-9e35-649598f356d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.407378] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5281b3ff-1866-3745-e20f-f3cd599c7bf2/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1786.407480] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Uploaded image 141b03bd-f52b-4815-b348-7822ad15c428 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1786.409069] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1786.409315] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0ca580cc-de25-4b7c-ae03-71e398b2ab9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.413487] env[62816]: DEBUG nova.compute.manager [req-d3880ac1-7a16-432c-8725-925b7242a73b req-2c7b0a27-92bf-44a1-a631-18407cb9196d service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Received event network-vif-plugged-2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1786.413815] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3880ac1-7a16-432c-8725-925b7242a73b req-2c7b0a27-92bf-44a1-a631-18407cb9196d service nova] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.413893] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3880ac1-7a16-432c-8725-925b7242a73b req-2c7b0a27-92bf-44a1-a631-18407cb9196d service nova] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.414103] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3880ac1-7a16-432c-8725-925b7242a73b req-2c7b0a27-92bf-44a1-a631-18407cb9196d service nova] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.414236] env[62816]: DEBUG nova.compute.manager [req-d3880ac1-7a16-432c-8725-925b7242a73b req-2c7b0a27-92bf-44a1-a631-18407cb9196d service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] No waiting events found dispatching network-vif-plugged-2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1786.414416] env[62816]: WARNING nova.compute.manager [req-d3880ac1-7a16-432c-8725-925b7242a73b req-2c7b0a27-92bf-44a1-a631-18407cb9196d service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Received unexpected event network-vif-plugged-2d41a0d3-8eb3-4503-8363-6ec1b787de60 for instance with vm_state building and task_state spawning. [ 1786.419597] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1786.419597] env[62816]: value = "task-1788954" [ 1786.419597] env[62816]: _type = "Task" [ 1786.419597] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.428621] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788954, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.628492] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.803s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.646227] env[62816]: INFO nova.scheduler.client.report [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Deleted allocations for instance 31ac8296-14fa-46f7-b825-c31904b832d5 [ 1786.667968] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788953, 'name': Destroy_Task} progress is 33%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.756650] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 8ccce660-6c41-412d-99ac-65ca7915d728] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1786.788437] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.788786] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Instance network_info: |[{"id": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "address": "fa:16:3e:33:41:2b", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d41a0d3-8e", "ovs_interfaceid": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1786.789553] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:41:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d41a0d3-8eb3-4503-8363-6ec1b787de60', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.797413] env[62816]: DEBUG oslo.service.loopingcall [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1786.799612] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1786.800093] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0a67ef59-4169-4bf4-aa8b-d7ab6f371576 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.561s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.800927] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5af83dfe-ee72-4889-a30d-f52288b0af5d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.824298] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.824298] env[62816]: value = "task-1788955" [ 1786.824298] env[62816]: _type = "Task" [ 1786.824298] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.832017] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788955, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.847535] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1786.868177] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1786.868426] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1786.868587] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.868836] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1786.869033] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.869197] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1786.869408] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1786.869641] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1786.869816] 
env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1786.869983] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1786.870173] env[62816]: DEBUG nova.virt.hardware [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1786.871258] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc923b3-c541-4828-9cf5-67f4ffab8490 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.879150] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1de31b1-67b2-4775-92ac-7c9063d54eee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.897547] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.897547] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.928879] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788954, 'name': Destroy_Task, 'duration_secs': 0.418727} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.929172] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroyed the VM [ 1786.929446] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1786.929938] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fa0de8cf-b283-40b8-8ee6-40a16f730c31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.935664] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1786.935664] env[62816]: value = "task-1788956" [ 1786.935664] env[62816]: _type = "Task" [ 1786.935664] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.943430] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788956, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.992564] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.992877] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.993071] env[62816]: INFO nova.compute.manager [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Attaching volume be3394e2-7a5f-4625-af79-3bd3e05d60b1 to /dev/sdb [ 1787.027011] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46d2b89-d58b-4785-ad02-ff62dceb84f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.034924] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0b5dff-a195-4027-97eb-08295c592b3b 
{{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.048159] env[62816]: DEBUG nova.virt.block_device [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating existing volume attachment record: 08a921dd-c392-42ed-8554-3f7bb278009c {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1787.155555] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4bb92e70-5983-4bc9-8308-9d6afab86472 tempest-ServerActionsTestJSON-1628446667 tempest-ServerActionsTestJSON-1628446667-project-member] Lock "31ac8296-14fa-46f7-b825-c31904b832d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.769s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.176335] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788953, 'name': Destroy_Task, 'duration_secs': 0.538966} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.177235] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Destroyed the VM [ 1787.177557] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1787.178447] env[62816]: DEBUG nova.network.neutron [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Successfully updated port: 76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1787.179528] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-816e38cb-803b-40d1-b5a2-3370d8609e9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.188768] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1787.188768] env[62816]: value = "task-1788958" [ 1787.188768] env[62816]: _type = "Task" [ 1787.188768] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.203264] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788958, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.260139] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 75165526-2744-40b3-b311-45d13cc48cf1] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1787.333586] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788955, 'name': CreateVM_Task, 'duration_secs': 0.401626} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.333792] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1787.334629] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.334799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.335149] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1787.335461] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8250454-1fd9-4e51-8967-62385f2834d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.339992] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1787.339992] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528ee2cc-8c85-fddf-476e-2538e6cadbe1" [ 1787.339992] env[62816]: _type = "Task" [ 1787.339992] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.347281] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528ee2cc-8c85-fddf-476e-2538e6cadbe1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.400329] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1787.447867] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788956, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.682895] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "refresh_cache-913bba01-e64b-4b52-af94-5effcefc2677" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.683178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired lock "refresh_cache-913bba01-e64b-4b52-af94-5effcefc2677" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.684050] env[62816]: DEBUG nova.network.neutron [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1787.700811] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788958, 'name': RemoveSnapshot_Task, 'duration_secs': 0.424281} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.701088] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1787.701437] env[62816]: DEBUG nova.compute.manager [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1787.702233] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba5ce2f9-8ea3-44a1-99cf-baf6b769a95f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.764015] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c4117422-edd4-49a0-882c-2d8ae39b344d] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1787.852025] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528ee2cc-8c85-fddf-476e-2538e6cadbe1, 'name': SearchDatastore_Task, 'duration_secs': 0.010789} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.852025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.852025] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.852025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.852025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.852025] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.852025] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e194aca1-cbf9-49b7-a2b7-0a80ec8d936e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.859860] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.860052] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.860787] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6339c69-ba09-4b80-b5a2-cfeef1d49b18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.866014] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1787.866014] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a048f2-97c5-3f37-16df-c5a97d87449b" [ 1787.866014] env[62816]: _type = "Task" [ 1787.866014] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.874253] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a048f2-97c5-3f37-16df-c5a97d87449b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.924653] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.924806] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1787.926264] env[62816]: INFO nova.compute.claims [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1787.947514] env[62816]: DEBUG oslo_vmware.api [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788956, 'name': RemoveSnapshot_Task, 'duration_secs': 0.628988} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.947800] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1787.948315] env[62816]: INFO nova.compute.manager [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 15.35 seconds to snapshot the instance on the hypervisor. 
[ 1788.175367] env[62816]: INFO nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Rebuilding instance [ 1788.215303] env[62816]: DEBUG nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1788.216186] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9c740a-2435-4f2f-b314-455f72f5eb0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.219853] env[62816]: INFO nova.compute.manager [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Shelve offloading [ 1788.221572] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.221785] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e26ab31d-89fb-4ec9-8300-52aebd5ef04b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.227558] env[62816]: DEBUG nova.network.neutron [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1788.233824] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1788.233824] env[62816]: value = "task-1788961" [ 1788.233824] env[62816]: _type = "Task" [ 1788.233824] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.241290] env[62816]: INFO nova.compute.manager [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Rebuilding instance [ 1788.261158] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1788.261447] env[62816]: DEBUG nova.compute.manager [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1788.262206] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776b68f5-5af4-4b73-a23a-b58502509a52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.270752] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: ecf6469a-c110-4e29-b931-6f9a3b0144dc] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1788.282449] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.282685] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.282904] env[62816]: DEBUG nova.network.neutron [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1788.315061] env[62816]: DEBUG nova.compute.manager [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1788.315061] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67be40f5-121a-4f8c-a1da-88cd5543851f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.376062] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 
tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a048f2-97c5-3f37-16df-c5a97d87449b, 'name': SearchDatastore_Task, 'duration_secs': 0.009107} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.376872] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82b19969-b86a-4ff8-b8a1-e1a9934df880 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.382061] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1788.382061] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521b9810-be48-1ad1-5b87-8f242d1d646a" [ 1788.382061] env[62816]: _type = "Task" [ 1788.382061] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.394846] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521b9810-be48-1ad1-5b87-8f242d1d646a, 'name': SearchDatastore_Task, 'duration_secs': 0.009036} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.395159] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.395456] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] fa719ff5-0219-485f-aac7-2cde4bbef8f6/fa719ff5-0219-485f-aac7-2cde4bbef8f6.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1788.395750] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-30f28780-1fb1-42a3-89ea-54f6f3c3591a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.401977] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1788.401977] env[62816]: value = "task-1788962" [ 1788.401977] env[62816]: _type = "Task" [ 1788.401977] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.410219] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.454845] env[62816]: DEBUG nova.network.neutron [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Updating instance_info_cache with network_info: [{"id": "76777c72-f001-496f-82bf-037969fdb5b7", "address": "fa:16:3e:d5:a9:10", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76777c72-f0", "ovs_interfaceid": "76777c72-f001-496f-82bf-037969fdb5b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1788.488382] env[62816]: DEBUG nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Received event network-changed-2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1788.488629] env[62816]: DEBUG nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Refreshing instance network info cache due to event network-changed-2d41a0d3-8eb3-4503-8363-6ec1b787de60. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1788.489039] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Acquiring lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1788.489256] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Acquired lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1788.489580] env[62816]: DEBUG nova.network.neutron [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Refreshing network info cache for port 2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1788.492931] env[62816]: DEBUG nova.compute.manager [None req-2d5cb3d3-fc4c-41d5-86db-cf5001401365 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Found 1 images (rotation: 2) {{(pid=62816) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1788.736602] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.736993] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f7e3869-8297-47da-9e22-54bce52f0997 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.746707] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1788.746707] env[62816]: value = "task-1788963" [ 1788.746707] env[62816]: _type = "Task" [ 1788.746707] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.758114] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1788.758533] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1788.759376] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf21f06-926c-4182-81a4-10aaf375edab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.766506] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1788.766789] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6a1370d9-3716-4b4d-a33f-ca130b8da890 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.785418] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9c246982-b215-46c1-9cd3-63907a515086] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1788.823615] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1788.824584] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04ed0667-f90d-481f-832d-f6c2bcc957aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.835607] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1788.835607] env[62816]: value = "task-1788965" [ 1788.835607] env[62816]: _type = "Task" [ 1788.835607] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.847020] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788965, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.870198] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1788.870198] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1788.870529] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1788.870836] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b62ffde-aaeb-4ec9-bdce-3ce9913cd60e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.881703] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1788.881703] env[62816]: value = "task-1788966" [ 1788.881703] env[62816]: _type = "Task" [ 1788.881703] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.895642] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.911753] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500672} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.912076] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] fa719ff5-0219-485f-aac7-2cde4bbef8f6/fa719ff5-0219-485f-aac7-2cde4bbef8f6.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1788.912293] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1788.912540] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19f80434-1d21-4f9d-b88c-295387052dd4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.921326] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1788.921326] env[62816]: value = "task-1788967" [ 1788.921326] env[62816]: _type = "Task" [ 1788.921326] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.929016] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788967, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.961502] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Releasing lock "refresh_cache-913bba01-e64b-4b52-af94-5effcefc2677" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.961502] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Instance network_info: |[{"id": "76777c72-f001-496f-82bf-037969fdb5b7", "address": "fa:16:3e:d5:a9:10", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76777c72-f0", "ovs_interfaceid": "76777c72-f001-496f-82bf-037969fdb5b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1788.962484] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:a9:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2b6a4065-12af-4fb9-ac47-ec9143f7297e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '76777c72-f001-496f-82bf-037969fdb5b7', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1788.970602] env[62816]: DEBUG oslo.service.loopingcall [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1788.973849] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1788.976917] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bce2ba41-00cd-454a-9615-6107a13ef4c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.007201] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1789.007201] env[62816]: value = "task-1788968" [ 1789.007201] env[62816]: _type = "Task" [ 1789.007201] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.013660] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.013910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.014314] env[62816]: DEBUG nova.objects.instance [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'flavor' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1789.024081] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788968, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.072175] env[62816]: DEBUG nova.network.neutron [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.264672] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c0554e-0fc2-44f4-bb8b-6ebdc175c92e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.273251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcec3bb6-fb2f-4129-b3e1-e5eec5a5b1e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.306059] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: a60d4ff0-af76-4489-840b-ff7f6c23b2ab] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1789.311307] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc6ff0a-2815-4fac-bf7b-33c97d310b1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.320427] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3736ee2b-f7f6-4d75-8174-de18c7ac1c53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.336944] env[62816]: DEBUG nova.compute.provider_tree [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1789.345711] env[62816]: 
DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788965, 'name': PowerOffVM_Task, 'duration_secs': 0.208415} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.345966] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1789.346646] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1789.346895] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8510a1c0-67fb-4c98-8141-61e8e81efe04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.353619] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1789.353619] env[62816]: value = "task-1788969" [ 1789.353619] env[62816]: _type = "Task" [ 1789.353619] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.361327] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788969, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.391088] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16916} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.391332] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1789.391516] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1789.391694] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1789.431323] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069387} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.431575] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1789.432331] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a358c12-5d08-4e57-945a-22ea08c5509c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.453690] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] fa719ff5-0219-485f-aac7-2cde4bbef8f6/fa719ff5-0219-485f-aac7-2cde4bbef8f6.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.453941] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-481b9422-16ae-4415-abda-541b68f6d7cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.472018] env[62816]: DEBUG nova.network.neutron [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updated VIF entry in instance network info cache for port 2d41a0d3-8eb3-4503-8363-6ec1b787de60. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1789.472366] env[62816]: DEBUG nova.network.neutron [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updating instance_info_cache with network_info: [{"id": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "address": "fa:16:3e:33:41:2b", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d41a0d3-8e", "ovs_interfaceid": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1789.474618] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1789.474618] env[62816]: value = "task-1788970" [ 1789.474618] env[62816]: _type = "Task" [ 1789.474618] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.482335] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788970, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.503595] env[62816]: DEBUG nova.compute.manager [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1789.504463] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057e660e-78e1-4486-be92-8fd69eb766d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.521289] env[62816]: DEBUG nova.objects.instance [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'pci_requests' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1789.522255] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788968, 'name': CreateVM_Task, 'duration_secs': 0.324266} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.522515] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1789.524216] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.524216] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.524216] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1789.524216] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-949c89d6-2711-4351-af7a-aa83af84186a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.528479] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1789.528479] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5270be34-68f2-04fe-f0cc-9d2ca52d15ff" [ 1789.528479] env[62816]: _type = "Task" [ 1789.528479] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.536682] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5270be34-68f2-04fe-f0cc-9d2ca52d15ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.577280] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.815133] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: ede88298-0eae-4471-b602-c26b5fa7a72a] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1789.842265] env[62816]: DEBUG nova.scheduler.client.report [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1789.865354] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1789.865675] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1789.865953] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371117', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'name': 'volume-b605cfce-b06c-4615-a606-12cb89b4a2d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9972b167-a950-4dba-ac02-068f66300053', 'attached_at': '', 'detached_at': '', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'serial': 'b605cfce-b06c-4615-a606-12cb89b4a2d4'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1789.866779] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9201b73e-18a4-443e-8c89-c8942f5bdb7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.889929] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72ff473-92c5-40b2-be56-5c883352a0ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.901268] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ee720c-3ea0-4015-93ab-850457b5af20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.921486] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a23cc02-b9b5-425d-b40a-017dc14eedda {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.937298] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] The volume has not been displaced from its original location: [datastore1] volume-b605cfce-b06c-4615-a606-12cb89b4a2d4/volume-b605cfce-b06c-4615-a606-12cb89b4a2d4.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1789.944168] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Reconfiguring VM instance instance-0000004f to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1789.944168] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea40e60b-3870-4cb3-9360-717ccb189656 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.964649] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1789.964649] env[62816]: value = "task-1788972" [ 1789.964649] env[62816]: _type = "Task" [ 1789.964649] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.973108] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788972, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.975849] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Releasing lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1789.976216] env[62816]: DEBUG nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Received event network-vif-plugged-76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.976481] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Acquiring lock "913bba01-e64b-4b52-af94-5effcefc2677-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.976747] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Lock "913bba01-e64b-4b52-af94-5effcefc2677-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.976955] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Lock "913bba01-e64b-4b52-af94-5effcefc2677-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.977211] 
env[62816]: DEBUG nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] No waiting events found dispatching network-vif-plugged-76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1789.977481] env[62816]: WARNING nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Received unexpected event network-vif-plugged-76777c72-f001-496f-82bf-037969fdb5b7 for instance with vm_state building and task_state spawning. [ 1789.977727] env[62816]: DEBUG nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Received event network-changed-76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.978065] env[62816]: DEBUG nova.compute.manager [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Refreshing instance network info cache due to event network-changed-76777c72-f001-496f-82bf-037969fdb5b7. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1789.978270] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Acquiring lock "refresh_cache-913bba01-e64b-4b52-af94-5effcefc2677" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.978414] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Acquired lock "refresh_cache-913bba01-e64b-4b52-af94-5effcefc2677" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.978580] env[62816]: DEBUG nova.network.neutron [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Refreshing network info cache for port 76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1789.990478] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788970, 'name': ReconfigVM_Task, 'duration_secs': 0.266816} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.992466] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Reconfigured VM instance instance-00000058 to attach disk [datastore1] fa719ff5-0219-485f-aac7-2cde4bbef8f6/fa719ff5-0219-485f-aac7-2cde4bbef8f6.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1789.994057] env[62816]: DEBUG nova.compute.manager [req-63bcfafe-56f1-4c79-a460-f52dd6f1e967 req-2661359c-f77d-4710-a9bc-6524aa08de40 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-vif-unplugged-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.994323] env[62816]: DEBUG oslo_concurrency.lockutils [req-63bcfafe-56f1-4c79-a460-f52dd6f1e967 req-2661359c-f77d-4710-a9bc-6524aa08de40 service nova] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.994582] env[62816]: DEBUG oslo_concurrency.lockutils [req-63bcfafe-56f1-4c79-a460-f52dd6f1e967 req-2661359c-f77d-4710-a9bc-6524aa08de40 service nova] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.994801] env[62816]: DEBUG oslo_concurrency.lockutils [req-63bcfafe-56f1-4c79-a460-f52dd6f1e967 req-2661359c-f77d-4710-a9bc-6524aa08de40 service nova] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.995062] env[62816]: DEBUG nova.compute.manager [req-63bcfafe-56f1-4c79-a460-f52dd6f1e967 req-2661359c-f77d-4710-a9bc-6524aa08de40 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] No waiting events found dispatching network-vif-unplugged-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1789.995329] env[62816]: WARNING nova.compute.manager [req-63bcfafe-56f1-4c79-a460-f52dd6f1e967 req-2661359c-f77d-4710-a9bc-6524aa08de40 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received unexpected event network-vif-unplugged-d0353b95-1d3d-4eab-9c03-374679fe2118 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1789.995892] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-79afd478-57de-4fbd-ab05-f354e8b3733e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.003250] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1790.003250] env[62816]: value = "task-1788973" [ 1790.003250] env[62816]: _type = "Task" [ 1790.003250] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.014601] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788973, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.017825] env[62816]: INFO nova.compute.manager [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] instance snapshotting [ 1790.018476] env[62816]: DEBUG nova.objects.instance [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'flavor' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1790.023865] env[62816]: DEBUG nova.objects.base [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1790.023985] env[62816]: DEBUG nova.network.neutron [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1790.039437] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5270be34-68f2-04fe-f0cc-9d2ca52d15ff, 'name': SearchDatastore_Task, 'duration_secs': 0.010842} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.040456] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.040783] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1790.041288] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.041288] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.041510] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1790.042079] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c5292d4-071d-43df-be13-e16cc8bfbb85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.052903] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1790.053219] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1790.054165] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca46d44d-29cc-445c-957f-fee204653f51 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.059320] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1790.059320] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524c4a76-642b-7d3d-42e5-63a9beb0f7f2" [ 1790.059320] env[62816]: _type = "Task" [ 1790.059320] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.070518] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524c4a76-642b-7d3d-42e5-63a9beb0f7f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.071563] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1790.072457] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567e7fd9-2232-4cc8-b6eb-650a2b3bd90a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.079114] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1790.080031] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41bf3b0b-cc1f-4f1c-a80a-541b0524e633 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.156337] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6fa4adaf-8887-4787-8796-2f130485e644 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.142s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.225065] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1790.225065] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1790.225065] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleting the datastore file [datastore1] f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1790.225262] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47cc6544-8fe0-4117-b0a3-70ed0b8f919f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.231994] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1790.231994] env[62816]: value = "task-1788975" [ 1790.231994] env[62816]: _type = "Task" [ 1790.231994] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.240600] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788975, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.318562] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: b788e586-850b-46e7-a204-d80eac56cce7] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1790.348074] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1790.348074] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1790.430269] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1790.430549] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1790.430736] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1790.430930] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1790.431092] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1790.431241] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1790.431446] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1790.431604] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1790.431771] env[62816]: DEBUG nova.virt.hardware 
[None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1790.431931] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1790.432121] env[62816]: DEBUG nova.virt.hardware [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1790.432972] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f857f322-146b-47e8-9f52-4eb034acd8d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.441016] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae18f616-9aa7-42ac-a8d0-46eecd80299e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.454260] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:50:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1790.461670] env[62816]: DEBUG oslo.service.loopingcall [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.461900] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1790.462110] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-305d62c4-939b-4c2e-9757-a853855632e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.484210] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788972, 'name': ReconfigVM_Task, 'duration_secs': 0.358489} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.485471] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Reconfigured VM instance instance-0000004f to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1790.491950] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1790.491950] env[62816]: value = "task-1788976" [ 1790.491950] env[62816]: _type = "Task" [ 1790.491950] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.492150] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4df180b1-7db0-4b09-ba94-f479c33739da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.510530] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1790.510530] env[62816]: value = "task-1788977" [ 1790.510530] env[62816]: _type = "Task" [ 1790.510530] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.516437] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788976, 'name': CreateVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.516619] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788973, 'name': Rename_Task, 'duration_secs': 0.187924} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.519923] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1790.520511] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-123f21af-1e26-4682-a532-9c4cb13c247e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.525157] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7496355-137c-474f-b040-193ef9026030 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.530716] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788977, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.534111] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1790.534111] env[62816]: value = "task-1788978" [ 1790.534111] env[62816]: _type = "Task" [ 1790.534111] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.556273] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e72dd6-b043-4d46-85c3-aeb7e7c1c611 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.559245] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.572522] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524c4a76-642b-7d3d-42e5-63a9beb0f7f2, 'name': SearchDatastore_Task, 'duration_secs': 0.033545} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.573295] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5294f78b-bba8-4957-a352-862b7290aeee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.578353] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1790.578353] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525fe9d2-06e7-f72e-ffa9-0ccec4316158" [ 1790.578353] env[62816]: _type = "Task" [ 1790.578353] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.586057] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525fe9d2-06e7-f72e-ffa9-0ccec4316158, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.731515] env[62816]: DEBUG nova.network.neutron [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Updated VIF entry in instance network info cache for port 76777c72-f001-496f-82bf-037969fdb5b7. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1790.732225] env[62816]: DEBUG nova.network.neutron [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Updating instance_info_cache with network_info: [{"id": "76777c72-f001-496f-82bf-037969fdb5b7", "address": "fa:16:3e:d5:a9:10", "network": {"id": "016fd9c6-3eba-4116-b0e5-7cd2f2db60a6", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1000920869-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b1b175f09c47457ead5fff6d3ecf1cee", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2b6a4065-12af-4fb9-ac47-ec9143f7297e", "external-id": "nsx-vlan-transportzone-95", "segmentation_id": 95, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap76777c72-f0", "ovs_interfaceid": "76777c72-f001-496f-82bf-037969fdb5b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.742289] env[62816]: DEBUG oslo_vmware.api [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1788975, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.317619} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.742576] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.743350] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.743350] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.773668] env[62816]: INFO nova.scheduler.client.report [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted allocations for instance f97ea34e-792e-4023-bd2f-549dba129925 [ 1790.822151] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 65e97c6a-5d8f-4241-9095-65a5a6132a69] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1790.852765] env[62816]: DEBUG nova.compute.utils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1790.855189] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1790.855189] env[62816]: DEBUG nova.network.neutron [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1790.908302] env[62816]: DEBUG nova.policy [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da85accbf2ae484aafdf85030398de3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e34ce2b6acac4ef08fd6b7d37dabef09', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1791.011517] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788976, 'name': CreateVM_Task, 'duration_secs': 0.352364} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.011767] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1791.012591] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.012809] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.013258] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1791.013563] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-776bd942-28d1-48d9-94ec-85138c26f9f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.021442] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1791.021442] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ebe3dd-bbd8-803d-da38-7b8521b8cdad" [ 1791.021442] env[62816]: 
_type = "Task" [ 1791.021442] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.028165] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788977, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.032826] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ebe3dd-bbd8-803d-da38-7b8521b8cdad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.043262] env[62816]: DEBUG oslo_vmware.api [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1788978, 'name': PowerOnVM_Task, 'duration_secs': 0.466041} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.043549] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1791.043790] env[62816]: INFO nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Took 6.50 seconds to spawn the instance on the hypervisor. 
[ 1791.044101] env[62816]: DEBUG nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1791.045198] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48eabb5-9f45-4e38-b506-270b2fb8a0f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.069589] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1791.069886] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6d037013-f57d-43c4-a61d-6e2279ca77d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.076542] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1791.076542] env[62816]: value = "task-1788979" [ 1791.076542] env[62816]: _type = "Task" [ 1791.076542] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.087169] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788979, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.090587] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525fe9d2-06e7-f72e-ffa9-0ccec4316158, 'name': SearchDatastore_Task, 'duration_secs': 0.011219} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.091477] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.091735] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 913bba01-e64b-4b52-af94-5effcefc2677/913bba01-e64b-4b52-af94-5effcefc2677.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1791.092009] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66899fe8-ed63-4ed3-9fe9-889e5cde7e1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.097845] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1791.097845] env[62816]: value = "task-1788980" [ 1791.097845] env[62816]: _type = "Task" [ 1791.097845] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.108120] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788980, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.237631] env[62816]: DEBUG oslo_concurrency.lockutils [req-aeebea5a-9cfa-489f-ac55-2bd4c03797ac req-8b5d583e-6d4b-4c86-b8dd-346de142ea7c service nova] Releasing lock "refresh_cache-913bba01-e64b-4b52-af94-5effcefc2677" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.240535] env[62816]: DEBUG nova.network.neutron [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Successfully created port: 1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1791.278128] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.278764] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.278764] env[62816]: DEBUG nova.objects.instance [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'resources' on Instance uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.329073] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: d34b7828-542e-4b66-a923-644d0d0f4866] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1791.359983] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1791.534282] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788977, 'name': ReconfigVM_Task, 'duration_secs': 0.878327} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.537994] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371117', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'name': 'volume-b605cfce-b06c-4615-a606-12cb89b4a2d4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9972b167-a950-4dba-ac02-068f66300053', 'attached_at': '', 'detached_at': '', 'volume_id': 'b605cfce-b06c-4615-a606-12cb89b4a2d4', 'serial': 'b605cfce-b06c-4615-a606-12cb89b4a2d4'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1791.538412] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1791.538780] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ebe3dd-bbd8-803d-da38-7b8521b8cdad, 'name': SearchDatastore_Task, 'duration_secs': 0.023956} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.539587] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f087c5c2-fa86-400e-b089-381907af5ed1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.542720] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.543048] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1791.543355] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.543654] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.545169] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1791.545169] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3bd01038-b6e6-4496-be0d-72364b17da5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.553143] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1791.553802] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4eb81d03-0fd2-4b6d-a8d9-8218a631b202 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.564987] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1791.565309] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1791.570238] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9edbba03-adf0-4eae-a1da-46d7acf70860 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.573953] env[62816]: INFO nova.compute.manager [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Took 16.97 seconds to build instance. [ 1791.578586] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1791.578586] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d3ba03-ce3f-ff73-fd6e-f04efb2f8ddc" [ 1791.578586] env[62816]: _type = "Task" [ 1791.578586] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.593724] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788979, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.600565] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d3ba03-ce3f-ff73-fd6e-f04efb2f8ddc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.611217] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788980, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.783295] env[62816]: DEBUG nova.objects.instance [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'numa_topology' on Instance uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.789102] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1791.789102] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1791.789102] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Deleting the datastore file [datastore1] 9972b167-a950-4dba-ac02-068f66300053 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1791.789102] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11e58c56-b293-4440-b588-69b1084af5a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.794483] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for the task: (returnval){ [ 1791.794483] env[62816]: value = "task-1788982" [ 1791.794483] env[62816]: _type = "Task" [ 1791.794483] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.805891] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788982, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.833416] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 1f80a18c-4406-4f13-8ad1-5b5d29f0d8ac] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1792.074891] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-None" by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.075227] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-None" acquired by "nova.compute.manager.ComputeManager.attach_interface.<locals>.do_attach_interface" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.075652] env[62816]: DEBUG nova.objects.instance [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'flavor' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.077074] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f572d9b7-5ee2-4475-90ac-6e67721d56b7 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.484s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.093733] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788979, 'name': CreateSnapshot_Task, 'duration_secs': 0.989186} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.099044] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1792.099361] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d3ba03-ce3f-ff73-fd6e-f04efb2f8ddc, 'name': SearchDatastore_Task, 'duration_secs': 0.051383} completed successfully.
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.100469] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Volume attach. Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1792.100693] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371155', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'name': 'volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b9e8af08-9579-4dbf-8ea1-35ffab75e159', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'serial': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1792.101430] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02fcdaaa-651c-458c-b31b-75d8de2cb804 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.105021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e62ca13d-a6af-4fd8-a894-f2bf259cc395 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.107570] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2330868-2a62-4b74-bbd1-29dc32679d37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.134808] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788980, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624672} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.135145] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1792.135145] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ce505e-e654-220f-45d0-959f28153025" [ 1792.135145] env[62816]: _type = "Task" [ 1792.135145] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.135997] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 913bba01-e64b-4b52-af94-5effcefc2677/913bba01-e64b-4b52-af94-5effcefc2677.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1792.136229] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1792.136986] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4feafc48-af60-4f4a-8c4c-2d1f83576aea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.139504] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04736963-22b7-4a5e-9a0d-4457fc64fbeb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.153429] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ce505e-e654-220f-45d0-959f28153025, 'name': SearchDatastore_Task, 'duration_secs': 0.011256} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.167754] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.168064] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1792.168404] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1792.168404] env[62816]: value = "task-1788983" [ 1792.168404] env[62816]: _type = "Task" [ 1792.168404] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.176618] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1/volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.176867] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9f5c871-6507-43cc-a6aa-17a40ea5e09b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.179432] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5499b64f-b003-4e85-bf38-bce6059645b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.205206] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1792.205206] env[62816]: value = "task-1788985" [ 1792.205206] env[62816]: _type = "Task" [ 1792.205206] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.205641] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1792.205641] env[62816]: value = "task-1788984" [ 1792.205641] env[62816]: _type = "Task" [ 1792.205641] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.221359] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788984, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.225112] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788985, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.284856] env[62816]: DEBUG nova.objects.base [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1792.307983] env[62816]: DEBUG oslo_vmware.api [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Task: {'id': task-1788982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.094645} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.308922] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1792.308922] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1792.309106] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1792.335176] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: ee543138-1c43-46c4-a512-1977fa5eb3c6] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1792.371843] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1792.390299] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1792.390299] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abca1422-db15-450e-b3a4-41ddf28a4ab6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.413193] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9571a090-5a55-44a5-b07c-3fbf6aceef29 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.428661] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1792.428983] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1792.429168] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1792.429356] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1792.429500] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1792.429810] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1792.429962] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 
tempest-DeleteServersAdminTestJSON-179893403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1792.430201] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1792.430406] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1792.430623] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1792.430840] env[62816]: DEBUG nova.virt.hardware [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1792.436665] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b85d9b2a-655e-49e1-bdbb-5e7093092b55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.446839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30026b5a-6305-4215-99c9-af4c99543c77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.466309] env[62816]: ERROR nova.compute.manager [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Failed to detach volume b605cfce-b06c-4615-a606-12cb89b4a2d4 from /dev/sda: nova.exception.InstanceNotFound: Instance 9972b167-a950-4dba-ac02-068f66300053 could not be found. 
[ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] Traceback (most recent call last): [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self.driver.rebuild(**kwargs) [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] raise NotImplementedError() [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] NotImplementedError [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] During handling of the above exception, another exception occurred: [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] Traceback (most recent call last): [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self.driver.detach_volume(context, old_connection_info, [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] return self._volumeops.detach_volume(connection_info, instance) [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._detach_volume_vmdk(connection_info, instance) [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] stable_ref.fetch_moref(session) [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] 
nova.exception.InstanceNotFound: Instance 9972b167-a950-4dba-ac02-068f66300053 could not be found. [ 1792.466309] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.539821] env[62816]: DEBUG nova.compute.manager [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1792.540048] env[62816]: DEBUG nova.compute.manager [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing instance network info cache due to event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1792.540277] env[62816]: DEBUG oslo_concurrency.lockutils [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.540436] env[62816]: DEBUG oslo_concurrency.lockutils [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.540594] env[62816]: DEBUG nova.network.neutron [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1792.594320] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1379eba5-38c5-44f4-851f-f3f12b1f9e70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.604842] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89208cfc-9010-4a49-9209-5f9595fa137e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.647436] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1792.649477] env[62816]: DEBUG nova.compute.utils [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Build of instance 9972b167-a950-4dba-ac02-068f66300053 aborted: Failed to rebuild volume backed instance. 
{{(pid=62816) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1792.650633] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7376e591-19bf-4ece-a53a-08344bed5165 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.654307] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e605a398-46ca-46af-9591-28a5ea0bebd1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.658038] env[62816]: ERROR nova.compute.manager [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 9972b167-a950-4dba-ac02-068f66300053 aborted: Failed to rebuild volume backed instance. [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] Traceback (most recent call last): [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self.driver.rebuild(**kwargs) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] raise NotImplementedError() [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] NotImplementedError [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] During handling of the above exception, another exception occurred: [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] Traceback (most recent call last): [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._detach_root_volume(context, instance, root_bdm) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] with excutils.save_and_reraise_exception(): [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self.force_reraise() [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 
9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] raise self.value [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self.driver.detach_volume(context, old_connection_info, [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] return self._volumeops.detach_volume(connection_info, instance) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._detach_volume_vmdk(connection_info, instance) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] stable_ref.fetch_moref(session) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] nova.exception.InstanceNotFound: Instance 9972b167-a950-4dba-ac02-068f66300053 could not be found. 
[ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] During handling of the above exception, another exception occurred: [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] Traceback (most recent call last): [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 10866, in _error_out_instance_on_exception [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] yield [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 1792.658038] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._do_rebuild_instance_with_claim( [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._do_rebuild_instance( [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._rebuild_default_impl(**kwargs) [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] self._rebuild_volume_backed_instance( [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] raise exception.BuildAbortException( [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] nova.exception.BuildAbortException: Build of instance 9972b167-a950-4dba-ac02-068f66300053 aborted: Failed to rebuild volume backed instance. [ 1792.659440] env[62816]: ERROR nova.compute.manager [instance: 9972b167-a950-4dba-ac02-068f66300053] [ 1792.669776] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb5c40d-acf9-4d3b-b2d8-f7b632e430e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.673848] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1792.673848] env[62816]: value = "task-1788986" [ 1792.673848] env[62816]: _type = "Task" [ 1792.673848] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.686360] env[62816]: DEBUG nova.compute.provider_tree [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.694923] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788986, 'name': CloneVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.700178] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.120747} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.700178] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1792.700178] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a25572-fe2f-4ebe-a737-596debe92d07 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.724024] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 913bba01-e64b-4b52-af94-5effcefc2677/913bba01-e64b-4b52-af94-5effcefc2677.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1792.725944] env[62816]: DEBUG nova.objects.instance [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'pci_requests' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1792.732955] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2229ee81-655a-4860-a8bb-04d165890056 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.748053] env[62816]: DEBUG nova.compute.manager [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Received event network-changed-2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1792.748255] env[62816]: DEBUG nova.compute.manager [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 
req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Refreshing instance network info cache due to event network-changed-2d41a0d3-8eb3-4503-8363-6ec1b787de60. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1792.748466] env[62816]: DEBUG oslo_concurrency.lockutils [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] Acquiring lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.748607] env[62816]: DEBUG oslo_concurrency.lockutils [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] Acquired lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.748777] env[62816]: DEBUG nova.network.neutron [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Refreshing network info cache for port 2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1792.761990] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.762473] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788984, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460811} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.763187] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1792.763187] env[62816]: value = "task-1788987" [ 1792.763187] env[62816]: _type = "Task" [ 1792.763187] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.763535] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1792.763616] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1792.763914] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-973d64d0-391b-45ad-b40e-31d631c62217 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.776882] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.778474] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1792.778474] env[62816]: value = "task-1788988" [ 1792.778474] env[62816]: _type = "Task" [ 1792.778474] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.787371] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788988, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.838141] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3c4cca03-b2ee-48a2-9a15-a21124bd6599] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1793.043094] env[62816]: DEBUG nova.network.neutron [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Successfully updated port: 1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1793.194643] env[62816]: DEBUG nova.scheduler.client.report [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1793.204544] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788986, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.227308] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788985, 'name': ReconfigVM_Task, 'duration_secs': 0.635726} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.227596] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfigured VM instance instance-0000004e to attach disk [datastore1] volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1/volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.233151] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40929fe3-1966-4551-84d0-f2b947b02ec9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.254387] env[62816]: DEBUG nova.objects.base [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1793.254607] env[62816]: DEBUG nova.network.neutron [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1793.260307] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1793.260307] env[62816]: value = "task-1788989" [ 1793.260307] env[62816]: _type = "Task" [ 1793.260307] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.274576] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788989, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.281021] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.289847] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.196571} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.290163] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1793.290984] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481fbdc6-dbc5-44b4-beec-51d4ed342cc6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.314491] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1793.317163] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c991b2b3-8a2c-4ba9-989e-6e6a362f3745 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.344020] env[62816]: DEBUG nova.policy [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1793.345316] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 049e1f97-ab58-4797-a084-f16a7a58e2cc] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1793.352978] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1793.352978] env[62816]: value = "task-1788990" [ 1793.352978] env[62816]: _type = "Task" [ 1793.352978] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.362564] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788990, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.550803] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "refresh_cache-bd5482f1-8884-49fa-9e9c-7873eadeefe0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.550803] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired lock "refresh_cache-bd5482f1-8884-49fa-9e9c-7873eadeefe0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.550803] env[62816]: DEBUG nova.network.neutron [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1793.694772] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788986, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.694772] env[62816]: DEBUG nova.network.neutron [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updated VIF entry in instance network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1793.694772] env[62816]: DEBUG nova.network.neutron [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd0353b95-1d", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.704527] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.425s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.753818] env[62816]: DEBUG nova.network.neutron [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Successfully created port: 2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1793.790020] env[62816]: DEBUG oslo_vmware.api [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1788989, 'name': ReconfigVM_Task, 'duration_secs': 0.222819} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.790855] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788987, 'name': ReconfigVM_Task, 'duration_secs': 0.968445} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.791191] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371155', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'name': 'volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b9e8af08-9579-4dbf-8ea1-35ffab75e159', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'serial': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1793.792721] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 913bba01-e64b-4b52-af94-5effcefc2677/913bba01-e64b-4b52-af94-5effcefc2677.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1793.793391] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-09a13f62-2893-4719-bfcf-c652b9d22155 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.802787] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1793.802787] env[62816]: value = "task-1788991" [ 1793.802787] env[62816]: _type = "Task" [ 1793.802787] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.813563] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788991, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.848984] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: a01e772c-dafe-4091-bae6-f9f59d5c972d] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1793.865015] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788990, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.910624] env[62816]: DEBUG nova.network.neutron [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updated VIF entry in instance network info cache for port 2d41a0d3-8eb3-4503-8363-6ec1b787de60. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1793.911012] env[62816]: DEBUG nova.network.neutron [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updating instance_info_cache with network_info: [{"id": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "address": "fa:16:3e:33:41:2b", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.165", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d41a0d3-8e", "ovs_interfaceid": "2d41a0d3-8eb3-4503-8363-6ec1b787de60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1793.996542] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.087688] env[62816]: DEBUG nova.network.neutron [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1794.190383] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788986, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.197063] env[62816]: DEBUG oslo_concurrency.lockutils [req-dabc9ce9-b9ae-417e-abf5-1dceb993b133 req-9193447d-c420-4951-a704-035f2bb9d999 service nova] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.212171] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e93b409d-9aa3-4002-b1d2-df2564b9a947 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.575s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.213203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.217s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.213482] env[62816]: INFO nova.compute.manager [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Unshelving [ 1794.271899] env[62816]: DEBUG nova.network.neutron [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Updating instance_info_cache with network_info: [{"id": "1808e3a6-539d-4c7f-8c88-b2ed44ad1368", "address": "fa:16:3e:64:c4:27", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1808e3a6-53", "ovs_interfaceid": "1808e3a6-539d-4c7f-8c88-b2ed44ad1368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.313275] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788991, 'name': Rename_Task, 'duration_secs': 0.218892} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.314134] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1794.314217] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ee5a6a27-135b-4b87-8bd7-63879a716123 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.321282] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1794.321282] env[62816]: value = "task-1788992" [ 1794.321282] env[62816]: _type = "Task" [ 1794.321282] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.330938] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788992, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.353119] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 83f7b5b8-228b-4d17-ab52-8df65fe247e3] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1794.365961] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788990, 'name': ReconfigVM_Task, 'duration_secs': 0.545995} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.365961] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Reconfigured VM instance instance-00000057 to attach disk [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f/a50b78c5-bb7e-4038-9a74-ecde2042828f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1794.366567] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d424785-f0f2-45f2-b66c-25f0e84fe85c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.375027] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1794.375027] env[62816]: value = "task-1788993" [ 1794.375027] env[62816]: _type = "Task" [ 1794.375027] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.383804] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788993, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.414977] env[62816]: DEBUG oslo_concurrency.lockutils [req-add2d98c-3bfc-49ff-a129-1430ab9a0855 req-ced27e32-8a7c-405f-a02e-d1f500e105a1 service nova] Releasing lock "refresh_cache-fa719ff5-0219-485f-aac7-2cde4bbef8f6" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.679022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.679022] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.693362] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788986, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.775785] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Releasing lock "refresh_cache-bd5482f1-8884-49fa-9e9c-7873eadeefe0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.776324] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Instance network_info: |[{"id": "1808e3a6-539d-4c7f-8c88-b2ed44ad1368", "address": "fa:16:3e:64:c4:27", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1808e3a6-53", "ovs_interfaceid": "1808e3a6-539d-4c7f-8c88-b2ed44ad1368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1794.777567] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:c4:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1808e3a6-539d-4c7f-8c88-b2ed44ad1368', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1794.791529] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Creating folder: Project (e34ce2b6acac4ef08fd6b7d37dabef09). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1794.792283] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-876c4e77-a3ac-4cdb-81ff-f8f4277d54a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.813460] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Created folder: Project (e34ce2b6acac4ef08fd6b7d37dabef09) in parent group-v370905. [ 1794.813713] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Creating folder: Instances. Parent ref: group-v371160. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1794.813920] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e1f4db8-d486-4fcf-a255-5de3c78fed9b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.826204] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Created folder: Instances in parent group-v371160. [ 1794.826470] env[62816]: DEBUG oslo.service.loopingcall [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1794.827206] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1794.828036] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0af5991-afe0-4212-95ab-d85f1e389e91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.848418] env[62816]: DEBUG nova.objects.instance [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'flavor' on Instance uuid b9e8af08-9579-4dbf-8ea1-35ffab75e159 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1794.849753] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788992, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.856726] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1794.856726] env[62816]: value = "task-1788996" [ 1794.856726] env[62816]: _type = "Task" [ 1794.856726] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.860250] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f9d9593a-1c25-47a1-98fd-4462a851f134] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1794.871556] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788996, 'name': CreateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.887750] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788993, 'name': Rename_Task, 'duration_secs': 0.197038} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.888647] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1794.888910] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-637d71de-98d7-4fd9-bb7c-4ab910cb4450 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.896411] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1794.896411] env[62816]: value = "task-1788997" [ 1794.896411] env[62816]: _type = "Task" [ 1794.896411] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.905426] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788997, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.985229] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85209b59-eddb-484a-abaf-47c72289a64c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.993148] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b360b1-6c80-4714-9640-0b6f40382295 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.025773] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd56fc53-d6d3-4e89-bc2e-f99f24ac4685 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.033734] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35fd4ba-b005-4b6d-b6c9-b4ade6b88023 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.047379] env[62816]: DEBUG nova.compute.provider_tree [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1795.088086] env[62816]: DEBUG nova.compute.manager [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Received event network-vif-plugged-1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.088086] env[62816]: DEBUG oslo_concurrency.lockutils [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] Acquiring lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.088086] env[62816]: DEBUG oslo_concurrency.lockutils [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.092117] env[62816]: DEBUG oslo_concurrency.lockutils [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.092400] env[62816]: DEBUG nova.compute.manager [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] No waiting events found dispatching network-vif-plugged-1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.092586] 
env[62816]: WARNING nova.compute.manager [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Received unexpected event network-vif-plugged-1808e3a6-539d-4c7f-8c88-b2ed44ad1368 for instance with vm_state building and task_state spawning. [ 1795.092755] env[62816]: DEBUG nova.compute.manager [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Received event network-changed-1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.092917] env[62816]: DEBUG nova.compute.manager [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Refreshing instance network info cache due to event network-changed-1808e3a6-539d-4c7f-8c88-b2ed44ad1368. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1795.093136] env[62816]: DEBUG oslo_concurrency.lockutils [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] Acquiring lock "refresh_cache-bd5482f1-8884-49fa-9e9c-7873eadeefe0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.093343] env[62816]: DEBUG oslo_concurrency.lockutils [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] Acquired lock "refresh_cache-bd5482f1-8884-49fa-9e9c-7873eadeefe0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.093446] env[62816]: DEBUG nova.network.neutron [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Refreshing network info cache for port 1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1795.194377] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1788986, 'name': CloneVM_Task, 'duration_secs': 2.05195} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.194377] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created linked-clone VM from snapshot [ 1795.194377] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978c7640-b4c7-44fa-807d-3cc723a54235 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.202941] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Uploading image 1d23fc8e-ee35-45ba-ad66-5e83269e15a0 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1795.229008] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1795.229008] env[62816]: value = "vm-371159" [ 1795.229008] env[62816]: _type = "VirtualMachine" [ 1795.229008] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1795.229439] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1eab1aa9-b44f-4c7a-a6a5-138170e3d5e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.237590] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease: (returnval){ [ 1795.237590] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52923743-8ffa-58ab-1b1f-aecb1b2cef6c" [ 1795.237590] env[62816]: _type = "HttpNfcLease" [ 1795.237590] env[62816]: } obtained for exporting VM: (result){ [ 1795.237590] env[62816]: value = "vm-371159" [ 1795.237590] env[62816]: _type = "VirtualMachine" [ 1795.237590] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1795.238057] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the lease: (returnval){ [ 1795.238057] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52923743-8ffa-58ab-1b1f-aecb1b2cef6c" [ 1795.238057] env[62816]: _type = "HttpNfcLease" [ 1795.238057] env[62816]: } to be ready. 
{{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1795.244415] env[62816]: DEBUG nova.compute.utils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1795.250050] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1795.250050] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52923743-8ffa-58ab-1b1f-aecb1b2cef6c" [ 1795.250050] env[62816]: _type = "HttpNfcLease" [ 1795.250050] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1795.334421] env[62816]: DEBUG oslo_vmware.api [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1788992, 'name': PowerOnVM_Task, 'duration_secs': 0.630343} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.335339] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1795.335540] env[62816]: INFO nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Took 8.49 seconds to spawn the instance on the hypervisor. [ 1795.335884] env[62816]: DEBUG nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1795.337202] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e161da4-a1a1-496f-88a4-cb3dc1313a6f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.352877] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f1a62111-5ac6-4d53-bf0a-0bb0fa6595ca tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.360s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.367432] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 1056fc6e-af1e-4d63-a9ce-9ade4dd73891] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1795.370124] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1788996, 'name': CreateVM_Task, 'duration_secs': 0.395358} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.370935] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1795.371749] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.371981] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.372348] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1795.372842] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ff51f1f-23a9-496c-b281-f83cd85af553 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.377946] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1795.377946] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52adc6a4-5595-ab92-2c1a-bb2e28ce59ff" [ 1795.377946] env[62816]: _type = "Task" [ 1795.377946] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.387477] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52adc6a4-5595-ab92-2c1a-bb2e28ce59ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.408716] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788997, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.455368] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.455591] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.551832] env[62816]: DEBUG nova.scheduler.client.report [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1795.748175] env[62816]: INFO nova.virt.block_device [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Booting with volume fce3ab81-3139-4d04-bdd8-7bec1dfedee2 at /dev/sdb [ 1795.749768] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1795.749768] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52923743-8ffa-58ab-1b1f-aecb1b2cef6c" [ 1795.749768] env[62816]: _type = "HttpNfcLease" [ 1795.749768] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1795.752950] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1795.752950] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52923743-8ffa-58ab-1b1f-aecb1b2cef6c" [ 1795.752950] env[62816]: _type = "HttpNfcLease" [ 1795.752950] env[62816]: }. 
{{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1795.757712] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e070708c-fea1-45f9-b9e6-61a20c758647 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.765987] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5202a574-4959-9cb5-0cbd-7f0966868c29/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1795.766079] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5202a574-4959-9cb5-0cbd-7f0966868c29/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1795.830224] env[62816]: INFO nova.compute.manager [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Rescuing [ 1795.830492] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.830643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.830814] env[62816]: DEBUG nova.network.neutron [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1795.833061] env[62816]: DEBUG nova.network.neutron [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Successfully updated port: 2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1795.837914] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c80bf6d-2194-440b-9e25-49aae4caf7cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.853061] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10e4a9f-512f-417e-b10c-939c43222e03 {{(pid=62816) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.870095] env[62816]: INFO nova.compute.manager [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Took 19.33 seconds to build instance. [ 1795.871794] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: ff6d52f9-6c11-4d7e-bd7f-4f6e307e3056] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1795.875128] env[62816]: DEBUG nova.compute.manager [req-1ef6758e-dd5f-4939-b5dd-1ba211db6308 req-b5b1b84d-ed15-4fcc-be0d-ade480e2c23b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-vif-plugged-2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1795.875128] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ef6758e-dd5f-4939-b5dd-1ba211db6308 req-b5b1b84d-ed15-4fcc-be0d-ade480e2c23b service nova] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.875128] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ef6758e-dd5f-4939-b5dd-1ba211db6308 req-b5b1b84d-ed15-4fcc-be0d-ade480e2c23b service nova] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.875128] env[62816]: DEBUG oslo_concurrency.lockutils [req-1ef6758e-dd5f-4939-b5dd-1ba211db6308 req-b5b1b84d-ed15-4fcc-be0d-ade480e2c23b service nova] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.875286] env[62816]: DEBUG nova.compute.manager [req-1ef6758e-dd5f-4939-b5dd-1ba211db6308 req-b5b1b84d-ed15-4fcc-be0d-ade480e2c23b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] No waiting events found dispatching network-vif-plugged-2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1795.875410] env[62816]: WARNING nova.compute.manager [req-1ef6758e-dd5f-4939-b5dd-1ba211db6308 req-b5b1b84d-ed15-4fcc-be0d-ade480e2c23b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received unexpected event network-vif-plugged-2ce4cb59-c403-4e6e-8aa0-30efaa416644 for instance with vm_state active and task_state None. [ 1795.895892] env[62816]: DEBUG nova.network.neutron [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Updated VIF entry in instance network info cache for port 1808e3a6-539d-4c7f-8c88-b2ed44ad1368. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1795.896259] env[62816]: DEBUG nova.network.neutron [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Updating instance_info_cache with network_info: [{"id": "1808e3a6-539d-4c7f-8c88-b2ed44ad1368", "address": "fa:16:3e:64:c4:27", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1808e3a6-53", "ovs_interfaceid": "1808e3a6-539d-4c7f-8c88-b2ed44ad1368", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.901039] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4fe3e08f-4e91-4073-a08e-99dbde257040 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.909424] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52adc6a4-5595-ab92-2c1a-bb2e28ce59ff, 'name': SearchDatastore_Task, 'duration_secs': 0.027027} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.910359] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.910589] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1795.910822] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.910971] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.911165] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1795.911628] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9748a3eb-07a1-49b6-ba2e-5d6a8e3c4add {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.917202] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1788997, 'name': PowerOnVM_Task, 'duration_secs': 0.532103} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.917818] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1795.918045] env[62816]: DEBUG nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1795.920829] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edb64c4-a206-4c50-bfa4-3530e827657e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.932077] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d40252f-9746-4d18-9ba6-0147456ae926 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.935018] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1795.935199] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1795.936179] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef2de893-644b-4905-afb8-cb5618106e57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.940830] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-53ef4052-d2cd-4a64-8f1b-23f492a6e1da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.945141] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1795.945141] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52623cf5-f7a8-dcab-2937-203c7d92685d" [ 1795.945141] env[62816]: _type = "Task" [ 1795.945141] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.959265] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1795.969380] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e44dc5c-334e-4c21-b9e6-511987836a78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.977018] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52623cf5-f7a8-dcab-2937-203c7d92685d, 'name': SearchDatastore_Task, 'duration_secs': 0.013747} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.980816] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0423b0ed-1619-4eb4-a022-f79bdc8db138 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.983490] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15dc7451-519b-4b0e-a366-1c6f83bf3433 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.990276] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1795.990276] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f0de64-6aee-eebe-1c27-cced51e24afe" [ 1795.990276] env[62816]: _type = "Task" [ 1795.990276] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.002026] env[62816]: DEBUG nova.virt.block_device [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating existing volume attachment record: fa9fcf07-6b53-42ad-b97d-5ec36ba509b2 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1796.007334] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f0de64-6aee-eebe-1c27-cced51e24afe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.057133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.380s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.057362] env[62816]: INFO nova.compute.manager [None req-ee08c5fb-442e-4cb8-b505-2b51b659aaa9 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Successfully reverted task state from rebuilding on failure for instance. [ 1796.342716] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.343037] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.343297] env[62816]: DEBUG nova.network.neutron [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1796.348147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "9972b167-a950-4dba-ac02-068f66300053" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.348446] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "9972b167-a950-4dba-ac02-068f66300053" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.349041] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "9972b167-a950-4dba-ac02-068f66300053-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.349041] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 
tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "9972b167-a950-4dba-ac02-068f66300053-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.349195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "9972b167-a950-4dba-ac02-068f66300053-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.351556] env[62816]: INFO nova.compute.manager [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Terminating instance [ 1796.353966] env[62816]: DEBUG nova.compute.manager [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1796.354082] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6509048a-ad02-4d32-a3c2-46985d3a1c85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.365784] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1509490b-6c96-4858-ad8e-a29527fe424f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.383246] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1fdb9582-ad38-44ab-a90f-cd68796a8e56 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "913bba01-e64b-4b52-af94-5effcefc2677" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.850s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.383710] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9bda24c6-f950-47ff-ad3c-ff745291870c] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1796.412503] env[62816]: DEBUG oslo_concurrency.lockutils [req-6b26fdf4-0d30-4f05-b035-7837e8712e3a req-75c44796-ae53-44a7-abbb-bff24efa6b02 service nova] Releasing lock "refresh_cache-bd5482f1-8884-49fa-9e9c-7873eadeefe0" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.413642] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 9972b167-a950-4dba-ac02-068f66300053 could not be found.
[ 1796.413838] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1796.414855] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9a120ea3-9c2e-4291-83cc-e17e7dff2314 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.424979] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f4ac6c-7951-4a5a-8e16-eeca5fd979dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.465892] env[62816]: INFO nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] bringing vm to original state: 'stopped' [ 1796.468702] env[62816]: WARNING nova.virt.vmwareapi.vmops [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9972b167-a950-4dba-ac02-068f66300053 could not be found. [ 1796.468903] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1796.469173] env[62816]: INFO nova.compute.manager [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Took 0.12 seconds to destroy the instance on the hypervisor. [ 1796.469482] env[62816]: DEBUG oslo.service.loopingcall [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.470421] env[62816]: DEBUG nova.compute.manager [-] [instance: 9972b167-a950-4dba-ac02-068f66300053] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1796.470531] env[62816]: DEBUG nova.network.neutron [-] [instance: 9972b167-a950-4dba-ac02-068f66300053] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1796.496182] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1796.496446] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1796.498087] env[62816]: INFO nova.compute.claims [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.507136] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f0de64-6aee-eebe-1c27-cced51e24afe, 'name': SearchDatastore_Task, 'duration_secs': 0.032647} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.507390] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.507665] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] bd5482f1-8884-49fa-9e9c-7873eadeefe0/bd5482f1-8884-49fa-9e9c-7873eadeefe0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1796.510273] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a8ee954-1e81-44c7-9dd3-5ea60bfd6901 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.520681] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1796.520681] env[62816]: value = "task-1789002" [ 1796.520681] env[62816]: _type = "Task" [ 1796.520681] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.530585] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789002, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.620334] env[62816]: DEBUG nova.network.neutron [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.887816] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0dbf907f-0313-435c-a8be-19f7e48ded76] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1796.893601] env[62816]: WARNING nova.network.neutron [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] 31f515cd-2053-4577-9ed5-9de5fe666946 already exists in list: networks containing: ['31f515cd-2053-4577-9ed5-9de5fe666946']. ignoring it [ 1797.033485] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789002, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.122920] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.391173] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: ba6e94c9-eb58-4040-8e28-f255961e76ca] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1797.473757] env[62816]: DEBUG nova.network.neutron [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "address": "fa:16:3e:ca:91:2e", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ce4cb59-c4", "ovs_interfaceid": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.475997] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1797.476334] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.478033] env[62816]: DEBUG nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1797.478033] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cd53e0-0015-4924-aa64-68209f15d8be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.487383] env[62816]: DEBUG nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1797.490235] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1797.490235] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5fd5c70-3f70-4ba4-a324-e8c56281f12f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.497537] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1797.497537] env[62816]: value = "task-1789003" [ 1797.497537] env[62816]: _type = "Task" [ 1797.497537] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.511290] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789003, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.523263] env[62816]: DEBUG nova.network.neutron [-] [instance: 9972b167-a950-4dba-ac02-068f66300053] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.538684] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541227} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.542418] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] bd5482f1-8884-49fa-9e9c-7873eadeefe0/bd5482f1-8884-49fa-9e9c-7873eadeefe0.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1797.542720] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1797.543209] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2743527-275b-4a51-a9df-4d744f16ae6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.551207] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1797.551207] env[62816]: value = "task-1789004" [ 1797.551207] env[62816]: _type = "Task" [ 1797.551207] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.562903] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789004, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.659204] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1797.659500] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbb8e0fc-40ea-402f-bbdb-e918e06663c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.669095] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1797.669095] env[62816]: value = "task-1789005" [ 1797.669095] env[62816]: _type = "Task" [ 1797.669095] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.678747] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789005, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.754745] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88178302-63e1-4f03-b49f-656053613670 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.762852] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb61a2b-50ba-41b2-abd1-3cec98dcaa49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.795702] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e9ab0b-19bd-4fff-b4a7-1849cfec1115 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.804026] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dff9c6b-48fe-4a5e-9564-a2fcc7eafc63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.823077] env[62816]: DEBUG nova.compute.provider_tree [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1797.897898] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 48b74d52-e764-4d14-b372-fc34872205dd] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1797.975023] env[62816]: DEBUG nova.compute.manager [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] [instance: 
dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-changed-2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1797.975182] env[62816]: DEBUG nova.compute.manager [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing instance network info cache due to event network-changed-2ce4cb59-c403-4e6e-8aa0-30efaa416644. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1797.975382] env[62816]: DEBUG oslo_concurrency.lockutils [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.978311] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.978768] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.978986] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.979596] env[62816]: DEBUG oslo_concurrency.lockutils [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.979847] env[62816]: DEBUG nova.network.neutron [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing network info cache for port 2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1797.981827] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4a4231-7173-4e5d-8818-d0a98dafb863 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.003158] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1798.003473] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1798.003740] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.003933] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1798.004098] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.004249] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1798.004452] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1798.004609] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1798.004776] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1798.004940] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1798.005125] env[62816]: DEBUG nova.virt.hardware [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1798.011850] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfiguring VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1798.015573] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0509db9c-d58a-492c-8d9d-8294db6d4b2e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.034438] env[62816]: INFO nova.compute.manager [-] [instance: 9972b167-a950-4dba-ac02-068f66300053] Took 1.56 seconds to deallocate network for instance. [ 1798.034798] env[62816]: DEBUG oslo_vmware.api [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789003, 'name': PowerOffVM_Task, 'duration_secs': 0.225872} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.037914] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1798.038145] env[62816]: DEBUG nova.compute.manager [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1798.038477] env[62816]: DEBUG oslo_vmware.api [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1798.038477] env[62816]: value = "task-1789006" [ 1798.038477] env[62816]: _type = "Task" [ 1798.038477] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.039594] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fcc0942-23f9-4fa4-8f56-f46dca02fdd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.049851] env[62816]: DEBUG oslo_vmware.api [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789006, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.064817] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789004, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079723} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.064817] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1798.064817] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e38bd38-c8fb-4759-abb2-8f7fafbcc724 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.088440] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] bd5482f1-8884-49fa-9e9c-7873eadeefe0/bd5482f1-8884-49fa-9e9c-7873eadeefe0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1798.088755] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7b045b6-c429-4867-8c55-9f1c0979b80c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.111991] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1798.111991] env[62816]: value = "task-1789007" [ 1798.111991] env[62816]: _type = "Task" [ 1798.111991] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.120276] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789007, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.180391] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789005, 'name': PowerOffVM_Task, 'duration_secs': 0.2054} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.180757] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1798.181754] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1584f5-4c27-4021-a7d4-17c793014bc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.205813] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376c2d59-4f16-40d8-a2f6-08db89ade8bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.236881] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1798.237274] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9d19098-e9c8-4739-b2ff-dbaa594728c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.244514] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1798.244514] env[62816]: value = "task-1789008" [ 1798.244514] env[62816]: _type = "Task" [ 1798.244514] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.253444] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789008, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.328678] env[62816]: DEBUG nova.scheduler.client.report [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1798.401604] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 946dad01-c012-457d-8bfe-6395ff0aaedf] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1798.552935] env[62816]: DEBUG oslo_vmware.api [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789006, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.560368] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.084s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.594709] env[62816]: INFO nova.compute.manager [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Took 0.56 seconds to detach 1 volumes for instance. [ 1798.597458] env[62816]: DEBUG nova.compute.manager [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] [instance: 9972b167-a950-4dba-ac02-068f66300053] Deleting volume: b605cfce-b06c-4615-a606-12cb89b4a2d4 {{(pid=62816) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1798.623716] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789007, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.759031] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1798.759382] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1798.759658] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.759854] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.760110] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1798.760408] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-040330ea-fbd0-49f8-8a4f-f27e99b05e2e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.769951] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1798.770192] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1798.770971] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fb6747e-0b1e-4c42-9174-a63d384fa5ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.776753] env[62816]: DEBUG nova.network.neutron [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updated VIF entry in instance network info cache for port 2ce4cb59-c403-4e6e-8aa0-30efaa416644. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1798.777174] env[62816]: DEBUG nova.network.neutron [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "address": "fa:16:3e:ca:91:2e", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ce4cb59-c4", "ovs_interfaceid": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.780814] 
env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1798.780814] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52af909d-5707-d790-0666-0adcf09da28e" [ 1798.780814] env[62816]: _type = "Task" [ 1798.780814] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.789882] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52af909d-5707-d790-0666-0adcf09da28e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.838034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.338s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.838034] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1798.905864] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: e003e41d-93e8-4258-b8ca-3c2420b73df0] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1799.053606] env[62816]: DEBUG oslo_vmware.api [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789006, 'name': ReconfigVM_Task, 'duration_secs': 0.820708} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.054203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.054431] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfigured VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1799.075447] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.075637] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.075824] env[62816]: DEBUG nova.objects.instance [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1799.124488] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789007, 'name': ReconfigVM_Task, 'duration_secs': 0.578907} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.124873] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Reconfigured VM instance instance-0000005a to attach disk [datastore1] bd5482f1-8884-49fa-9e9c-7873eadeefe0/bd5482f1-8884-49fa-9e9c-7873eadeefe0.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1799.125709] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99222045-a3d5-4e53-a971-6dd63ac2baad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.132489] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1799.132489] env[62816]: value = "task-1789011" [ 1799.132489] env[62816]: _type = "Task" [ 1799.132489] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.141346] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789011, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.148477] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.284020] env[62816]: DEBUG oslo_concurrency.lockutils [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.284020] env[62816]: DEBUG nova.compute.manager [req-492dcfca-0596-4190-b0dc-8b097af6805e req-8a16f9cb-15b1-4ccb-8a19-ba5e1cef10b5 service nova] [instance: 9972b167-a950-4dba-ac02-068f66300053] Received event network-vif-deleted-6aab8d5d-a76e-4738-8cab-9e6b59a195f0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1799.293062] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52af909d-5707-d790-0666-0adcf09da28e, 'name': SearchDatastore_Task, 'duration_secs': 0.015109} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.294069] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-300cd335-b9ec-4aef-b9e1-18fa4e991a0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.301047] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1799.301047] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52edc132-e9dc-9045-0402-91f0d4e0dc67" [ 1799.301047] env[62816]: _type = "Task" [ 1799.301047] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.309584] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52edc132-e9dc-9045-0402-91f0d4e0dc67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.324540] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "913bba01-e64b-4b52-af94-5effcefc2677" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.325209] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "913bba01-e64b-4b52-af94-5effcefc2677" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.325528] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "913bba01-e64b-4b52-af94-5effcefc2677-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.325820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "913bba01-e64b-4b52-af94-5effcefc2677-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.326097] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "913bba01-e64b-4b52-af94-5effcefc2677-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.328597] env[62816]: INFO nova.compute.manager [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Terminating instance [ 1799.330952] env[62816]: DEBUG nova.compute.manager [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1799.331158] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1799.332016] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c38af06-e3bd-4612-bb4d-a8c546490774 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.341139] env[62816]: DEBUG nova.compute.utils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1799.342902] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1799.343673] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1799.343883] env[62816]: DEBUG nova.network.neutron [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1799.345886] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27e86bdd-f4f8-4776-b6f7-42dc51ccf802 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.353529] env[62816]: DEBUG oslo_vmware.api [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1799.353529] env[62816]: value = "task-1789012" [ 1799.353529] env[62816]: _type = "Task" [ 1799.353529] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.365183] env[62816]: DEBUG oslo_vmware.api [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789012, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.400100] env[62816]: DEBUG nova.policy [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5a76991deaec41119f48b61585ad98b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bca8e53dcceb4a5e945cba0a783b2e31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1799.410884] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: afd02433-0912-44ef-8e0e-71d6ee8fbb41] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1799.564563] env[62816]: DEBUG oslo_concurrency.lockutils [None req-776f39c7-0c96-4c81-8b55-b99f15943cd3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.489s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.643113] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789011, 'name': Rename_Task, 'duration_secs': 0.348038} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.643409] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1799.643658] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68d00b38-457c-4c8d-93fb-9ee1b8f00e91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.650078] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1799.650078] env[62816]: value = "task-1789013" [ 1799.650078] env[62816]: _type = "Task" [ 1799.650078] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.658538] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789013, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.716083] env[62816]: DEBUG nova.network.neutron [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Successfully created port: 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1799.820125] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52edc132-e9dc-9045-0402-91f0d4e0dc67, 'name': SearchDatastore_Task, 'duration_secs': 0.018779} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.820410] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.820678] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. 
{{(pid=62816) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1799.821018] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5f75f11-204a-4487-a585-15fa606fec68 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.828336] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1799.828336] env[62816]: value = "task-1789014" [ 1799.828336] env[62816]: _type = "Task" [ 1799.828336] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.838319] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789014, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.844015] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1799.864878] env[62816]: DEBUG oslo_vmware.api [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789012, 'name': PowerOffVM_Task, 'duration_secs': 0.215585} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.865896] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1799.865896] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1799.866071] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7099d762-2643-4fa5-85c9-2fe1ecb153a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.884543] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.884827] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.885529] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "a50b78c5-bb7e-4038-9a74-ecde2042828f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.885529] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1799.885529] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.887989] env[62816]: INFO nova.compute.manager [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 
tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Terminating instance [ 1799.890226] env[62816]: DEBUG nova.compute.manager [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1799.890559] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1799.891419] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c733c8-a15d-432b-a0a2-643331a4af41 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.900455] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1799.900781] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-135fe554-aa7f-4f4e-999e-dfef2ac3e89b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.916668] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0707fdd6-2aed-4a09-90e0-c7fb0eae6acf] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1800.010282] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1800.010538] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1800.011165] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Deleting the datastore file [datastore1] 913bba01-e64b-4b52-af94-5effcefc2677 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.011585] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcc27011-5a15-43f3-bbee-ad2a2df74a64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.013825] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-130f3abc-1265-4582-be23-223379efa6a1 
tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1800.014091] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1800.014330] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] a50b78c5-bb7e-4038-9a74-ecde2042828f {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1800.015049] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe8a0992-119d-4e86-8e81-f1a633c655ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.022435] env[62816]: DEBUG oslo_vmware.api [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1800.022435] env[62816]: value = "task-1789017" [ 1800.022435] env[62816]: _type = "Task" [ 1800.022435] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.024059] env[62816]: DEBUG oslo_vmware.api [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1800.024059] env[62816]: value = "task-1789018" [ 1800.024059] env[62816]: _type = "Task" [ 1800.024059] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.036252] env[62816]: DEBUG oslo_vmware.api [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.039461] env[62816]: DEBUG oslo_vmware.api [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789018, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.087018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8e35d408-be27-4756-9940-12386f70929b tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.088529] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.940s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.088918] env[62816]: DEBUG nova.objects.instance [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lazy-loading 'resources' on Instance uuid 9972b167-a950-4dba-ac02-068f66300053 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1800.161502] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789013, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.340216] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789014, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.422187] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: d16a99df-f092-4d56-9730-852883bbdb70] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1800.536961] env[62816]: DEBUG oslo_vmware.api [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.478324} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.540407] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1800.541049] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1800.541049] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1800.541049] env[62816]: INFO nova.compute.manager [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1800.541232] env[62816]: DEBUG oslo.service.loopingcall [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.541455] env[62816]: DEBUG oslo_vmware.api [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466841} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.541673] env[62816]: DEBUG nova.compute.manager [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1800.541770] env[62816]: DEBUG nova.network.neutron [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1800.543647] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1800.543745] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1800.543878] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1800.544089] env[62816]: INFO nova.compute.manager [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1800.544319] env[62816]: DEBUG oslo.service.loopingcall [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1800.544517] env[62816]: DEBUG nova.compute.manager [-] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1800.544611] env[62816]: DEBUG nova.network.neutron [-] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1800.665427] env[62816]: DEBUG oslo_vmware.api [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789013, 'name': PowerOnVM_Task, 'duration_secs': 0.632962} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.668035] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1800.668264] env[62816]: INFO nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Took 8.30 seconds to spawn the instance on the hypervisor. [ 1800.668442] env[62816]: DEBUG nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1800.669708] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f253f6-51de-4e1e-b4d1-723497a6af6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.823459] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a4c493-e37e-4de7-8470-921c5e49f465 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.835486] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3513ec98-0605-43be-b087-a620b9841fd8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.844011] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789014, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534669} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.868822] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. [ 1800.870016] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1800.873246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c59aa89-2b7d-4b40-b186-9f6f9a281fd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.876173] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1383c564-8346-4e1d-9024-ead0503e2b1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.905706] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f875a07c-7c89-4982-9989-d46ebc5c7a4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.919098] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1800.923320] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-870db68e-e637-4803-b879-4e50f3879d7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.939082] env[62816]: DEBUG nova.network.neutron [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.944430] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: b409568f-6e04-4218-8a7b-1bbf785115c3] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1800.955329] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1800.955582] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 
tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1800.955732] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1800.955912] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1800.956277] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1800.956277] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1800.956475] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1800.956573] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1800.956740] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1800.956904] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1800.957092] env[62816]: DEBUG nova.virt.hardware [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1800.959092] env[62816]: DEBUG nova.compute.provider_tree [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 
tempest-ServerActionsV293TestJSON-1229099144-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.960833] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e839c9e0-827d-483a-beb8-d43967b9b653 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.963699] env[62816]: DEBUG nova.network.neutron [-] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.968161] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1800.968161] env[62816]: value = "task-1789019" [ 1800.968161] env[62816]: _type = "Task" [ 1800.968161] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.974329] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a3cfcb7-cb15-444b-a814-703fd53876dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.982243] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.032713] env[62816]: DEBUG nova.compute.manager [req-7c4b0812-fe59-42fc-bf23-b5d4d3763397 req-957e47fd-e5cd-483f-ab0f-f9373a120693 service nova] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Received event network-vif-deleted-e11d46ce-b68d-4125-a2ed-0fcfff6c3cd0 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1801.188897] env[62816]: INFO nova.compute.manager [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Took 13.28 seconds to build instance. 
[ 1801.213206] env[62816]: DEBUG nova.compute.manager [req-b756ef09-7abc-4eb5-b4e9-93c369046b73 req-37b61bb9-600f-409b-a645-140efe59c14a service nova] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Received event network-vif-deleted-76777c72-f001-496f-82bf-037969fdb5b7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1801.310358] env[62816]: DEBUG nova.network.neutron [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Successfully updated port: 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1801.386036] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-8c517e18-ce7f-4b4e-b7bb-375e247abd6c" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.386257] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-8c517e18-ce7f-4b4e-b7bb-375e247abd6c" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.386597] env[62816]: DEBUG nova.objects.instance [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'flavor' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.443620] env[62816]: INFO nova.compute.manager [-] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Took 0.90 seconds to deallocate network for instance. [ 1801.446747] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c6dc008c-6336-4271-9635-a7e0652138e0] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1801.465101] env[62816]: DEBUG nova.scheduler.client.report [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1801.468772] env[62816]: INFO nova.compute.manager [-] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Took 0.92 seconds to deallocate network for instance. 
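The scheduler report client entry above repeats the provider's inventory payload (VCPU, MEMORY_MB, DISK_GB records with total/reserved/allocation_ratio and unit bounds). As a hedged illustration of what those fields mean, the sketch below copies the values from the log and applies the usual Placement capacity arithmetic, capacity = (total - reserved) * allocation_ratio; the `capacity` helper is illustrative, not a Placement API.

```python
# Inventory data copied from the report above; the capacity() helper is a
# hedged illustration of how allocation_ratio stretches schedulable capacity.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0,
                  'min_unit': 1, 'max_unit': 16,    'step_size': 1},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 65530, 'step_size': 1},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0,
                  'min_unit': 1, 'max_unit': 160,   'step_size': 1},
}

def capacity(rec):
    """Schedulable capacity for one resource class."""
    return int((rec['total'] - rec['reserved']) * rec['allocation_ratio'])

for rc, rec in inventory.items():
    print(rc, capacity(rec))   # VCPU 192, MEMORY_MB 196078, DISK_GB 400
```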
[ 1801.482996] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.613518] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.691190] env[62816]: DEBUG oslo_concurrency.lockutils [None req-37b3c17f-a5d7-42a4-bbb4-7b4dd9854624 tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.793s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.813651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.813651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.813862] env[62816]: DEBUG nova.network.neutron [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1801.951781] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4fd2da5f-2867-4eeb-b7ab-8ffd7b096859] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1801.954589] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Acquiring lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.954819] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" 
:: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.955800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Acquiring lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.955800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.956152] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.957953] env[62816]: INFO nova.compute.manager [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Terminating instance [ 1801.959716] env[62816]: DEBUG nova.compute.manager [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1801.959986] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1801.962556] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.962556] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cb7325-94cc-4cd2-a1e9-98d0c3e8c086 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.969966] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.974258] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.361s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.974258] env[62816]: DEBUG nova.objects.instance [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'pci_requests' on Instance uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1801.978514] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.979940] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.980180] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-791deec2-6aef-4d4f-b151-8d1e00669328 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.990093] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a 
tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.991306] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Waiting for the task: (returnval){ [ 1801.991306] env[62816]: value = "task-1789020" [ 1801.991306] env[62816]: _type = "Task" [ 1801.991306] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.998840] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Task: {'id': task-1789020, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.002892] env[62816]: DEBUG nova.objects.instance [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'pci_requests' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1802.344751] env[62816]: DEBUG nova.network.neutron [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1802.458528] env[62816]: DEBUG nova.network.neutron [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.462266] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 1e3f720c-5a6f-4e7c-aafc-b4680d9667e1] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1802.482945] env[62816]: DEBUG nova.objects.instance [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'numa_topology' on Instance uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1802.484129] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789019, 'name': ReconfigVM_Task, 'duration_secs': 1.210113} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.484394] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfigured VM instance instance-0000004e to attach disk [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1802.485255] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1491aa18-4105-4bb1-8274-d1adbf1ddb8f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.491636] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cd203b57-1a17-4ff5-892e-8cdb698589e0 tempest-ServerActionsV293TestJSON-1229099144 tempest-ServerActionsV293TestJSON-1229099144-project-member] Lock "9972b167-a950-4dba-ac02-068f66300053" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.143s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.517579] env[62816]: DEBUG nova.objects.base [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1802.517829] env[62816]: DEBUG nova.network.neutron [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1802.525123] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c330c37f-4dec-4ead-819f-883e7a697611 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.542454] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Task: {'id': task-1789020, 'name': PowerOffVM_Task, 'duration_secs': 0.247829} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.543333] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1802.543618] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1802.543957] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5919fb5-1008-4f65-b5bc-7287b016c6b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.548162] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1802.548162] env[62816]: value = "task-1789021" [ 1802.548162] env[62816]: _type = "Task" [ 1802.548162] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.560087] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789021, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.617957] env[62816]: DEBUG nova.policy [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1802.672419] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1802.672650] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1802.672802] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Deleting the datastore file [datastore1] bd5482f1-8884-49fa-9e9c-7873eadeefe0 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1802.673456] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef23274b-68b5-48c1-8b1a-1231566a92a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.680045] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Waiting for the task: (returnval){ [ 1802.680045] env[62816]: value = "task-1789023" [ 1802.680045] env[62816]: _type = "Task" [ 1802.680045] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.688234] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Task: {'id': task-1789023, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.961797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1802.962162] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Instance network_info: |[{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1802.962599] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:5c:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c4f15cf-a845-46c3-a9e8-8f650fb6a58b', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1802.969975] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Creating folder: Project (bca8e53dcceb4a5e945cba0a783b2e31). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1802.970428] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: e1067d45-1938-4021-b902-21a1aa57058a] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1802.972267] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1966133e-c652-4c67-9b8c-a91bf5c7798d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.983425] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Created folder: Project (bca8e53dcceb4a5e945cba0a783b2e31) in parent group-v370905. [ 1802.983629] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Creating folder: Instances. Parent ref: group-v371165. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1802.983908] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2b56f439-daa9-4243-965e-0805404bfcc7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.989107] env[62816]: INFO nova.compute.claims [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1802.993583] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Created folder: Instances in parent group-v371165. [ 1802.993583] env[62816]: DEBUG oslo.service.loopingcall [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.993980] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1802.994214] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14c8b4fd-7791-4a64-a542-d8553c803112 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.015463] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1803.015463] env[62816]: value = "task-1789026" [ 1803.015463] env[62816]: _type = "Task" [ 1803.015463] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.024685] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789026, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.056876] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789021, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.189780] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Task: {'id': task-1789023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.241023] env[62816]: DEBUG nova.compute.manager [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-vif-plugged-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1803.241369] env[62816]: DEBUG oslo_concurrency.lockutils [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] Acquiring lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1803.241595] env[62816]: DEBUG oslo_concurrency.lockutils [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.241870] env[62816]: DEBUG oslo_concurrency.lockutils [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.242145] env[62816]: DEBUG nova.compute.manager [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] No waiting events found dispatching network-vif-plugged-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1803.242404] env[62816]: WARNING nova.compute.manager [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received unexpected event network-vif-plugged-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b for instance with vm_state building and task_state spawning. 
[ 1803.242658] env[62816]: DEBUG nova.compute.manager [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1803.242893] env[62816]: DEBUG nova.compute.manager [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing instance network info cache due to event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1803.243185] env[62816]: DEBUG oslo_concurrency.lockutils [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1803.243400] env[62816]: DEBUG oslo_concurrency.lockutils [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1803.243625] env[62816]: DEBUG nova.network.neutron [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1803.475122] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 1c3392d3-cfb0-47c6-9366-8c363ad21297] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1803.525257] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789026, 'name': CreateVM_Task} progress is 15%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.561267] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789021, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.692785] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Task: {'id': task-1789023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.978187] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0a1a8539-940a-4a17-9826-82736be41892] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1804.031457] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789026, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.065426] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789021, 'name': ReconfigVM_Task, 'duration_secs': 1.192834} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.065733] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1804.066222] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-41a1051e-90e4-45b6-9cbb-5de9605f0ab3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.075316] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1804.075316] env[62816]: value = "task-1789027" [ 1804.075316] env[62816]: _type = "Task" [ 1804.075316] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.081657] env[62816]: DEBUG nova.network.neutron [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updated VIF entry in instance network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1804.082018] env[62816]: DEBUG nova.network.neutron [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1804.086408] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789027, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.192633] env[62816]: DEBUG oslo_vmware.api [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Task: {'id': task-1789023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.138482} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.192893] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1804.193472] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1804.193713] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1804.193897] env[62816]: INFO nova.compute.manager [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Took 2.23 seconds to destroy the instance on the hypervisor. [ 1804.194230] env[62816]: DEBUG oslo.service.loopingcall [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1804.194493] env[62816]: DEBUG nova.compute.manager [-] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1804.194721] env[62816]: DEBUG nova.network.neutron [-] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1804.236115] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6e4e60-b8c4-4aed-9364-8bcf18f9557f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.245341] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d72c1ba-1814-4842-9b2f-73df8b7d9977 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.275412] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbadb096-d473-499c-b27e-d462bf98c413 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.286152] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5173dd88-5a67-4929-be31-ba057a2aa544 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.299625] env[62816]: DEBUG nova.compute.provider_tree [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1804.444019] env[62816]: DEBUG nova.network.neutron [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Successfully updated port: 8c517e18-ce7f-4b4e-b7bb-375e247abd6c {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1804.481281] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0e0261fe-4376-487c-9d54-c4f37577409c] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1804.527035] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789026, 'name': CreateVM_Task, 'duration_secs': 1.124152} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.527226] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1804.527898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.528468] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.528468] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1804.528623] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50fc74f2-0d45-454e-b5aa-5aa85b78d4ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.534235] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1804.534235] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b2582c-eda8-abb5-6d90-56263c0ad700" [ 1804.534235] env[62816]: _type = "Task" [ 1804.534235] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.546297] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b2582c-eda8-abb5-6d90-56263c0ad700, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.585130] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789027, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.588065] env[62816]: DEBUG oslo_concurrency.lockutils [req-d741101a-3c61-4a9e-8c88-4e52ad57d525 req-0436235e-6d60-49c2-a462-64e5957e07ac service nova] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.803593] env[62816]: DEBUG nova.scheduler.client.report [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1804.943054] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.943054] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.943054] env[62816]: DEBUG nova.network.neutron [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1804.984971] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 6767c231-2dcb-4d19-ae7c-5b026d48ed26] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1805.043933] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b2582c-eda8-abb5-6d90-56263c0ad700, 'name': SearchDatastore_Task, 'duration_secs': 0.071404} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.044358] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.044600] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1805.044832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.044979] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.045179] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1805.045443] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fec28d47-66ac-4ecf-a32b-1c53f22b428d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.054483] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1805.054705] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1805.055487] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-904ec476-b0bb-453a-97cc-d31814bdc788 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.061189] env[62816]: DEBUG nova.network.neutron [-] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.062581] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1805.062581] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5219bb9a-c0eb-226e-f229-24512f273e67" [ 1805.062581] env[62816]: _type = "Task" [ 1805.062581] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.071730] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5219bb9a-c0eb-226e-f229-24512f273e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.085879] env[62816]: DEBUG oslo_vmware.api [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789027, 'name': PowerOnVM_Task, 'duration_secs': 0.556962} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.086168] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1805.089142] env[62816]: DEBUG nova.compute.manager [None req-2042764c-25c6-4676-aea2-c0079114f34a tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1805.089823] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24617ec8-3d7d-449b-90b8-ce1b4c2e8b29 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.308562] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.334s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.310990] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.349s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.311374] env[62816]: DEBUG nova.objects.instance [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'resources' on Instance uuid a50b78c5-bb7e-4038-9a74-ecde2042828f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1805.340427] env[62816]: DEBUG nova.compute.manager [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-vif-plugged-8c517e18-ce7f-4b4e-b7bb-375e247abd6c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1805.340686] env[62816]: DEBUG oslo_concurrency.lockutils [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.340953] env[62816]: DEBUG oslo_concurrency.lockutils [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.341192] env[62816]: DEBUG oslo_concurrency.lockutils 
[req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1805.341399] env[62816]: DEBUG nova.compute.manager [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] No waiting events found dispatching network-vif-plugged-8c517e18-ce7f-4b4e-b7bb-375e247abd6c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1805.341543] env[62816]: WARNING nova.compute.manager [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received unexpected event network-vif-plugged-8c517e18-ce7f-4b4e-b7bb-375e247abd6c for instance with vm_state active and task_state None. [ 1805.341734] env[62816]: DEBUG nova.compute.manager [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-changed-8c517e18-ce7f-4b4e-b7bb-375e247abd6c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1805.341927] env[62816]: DEBUG nova.compute.manager [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing instance network info cache due to event network-changed-8c517e18-ce7f-4b4e-b7bb-375e247abd6c. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1805.342126] env[62816]: DEBUG oslo_concurrency.lockutils [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.350427] env[62816]: INFO nova.network.neutron [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating port d0353b95-1d3d-4eab-9c03-374679fe2118 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1805.487496] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: cf6ff174-1324-42bd-a77a-905b9a333c27] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1805.490651] env[62816]: WARNING nova.network.neutron [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] 31f515cd-2053-4577-9ed5-9de5fe666946 already exists in list: networks containing: ['31f515cd-2053-4577-9ed5-9de5fe666946']. 
ignoring it [ 1805.490867] env[62816]: WARNING nova.network.neutron [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] 31f515cd-2053-4577-9ed5-9de5fe666946 already exists in list: networks containing: ['31f515cd-2053-4577-9ed5-9de5fe666946']. ignoring it [ 1805.564124] env[62816]: INFO nova.compute.manager [-] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Took 1.37 seconds to deallocate network for instance. [ 1805.579513] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5219bb9a-c0eb-226e-f229-24512f273e67, 'name': SearchDatastore_Task, 'duration_secs': 0.014128} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.582959] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e2d826a-41f3-4fcb-a066-362a470bfc42 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.588391] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1805.588391] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b3c82c-bfdc-854f-0fbc-b49191d6dfbb" [ 1805.588391] env[62816]: _type = "Task" [ 1805.588391] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.596950] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b3c82c-bfdc-854f-0fbc-b49191d6dfbb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.993698] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 42093232-a4e5-4cc3-ab1c-a0023a91e102] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1806.015069] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd80aa70-92ed-4f33-8836-2286ea7ad9da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.021578] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668cb783-75da-4917-8e8b-ab5c1b592d8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.053316] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2b3982-29d8-4f4a-b3c8-de63a766bbc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.061125] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba465a8f-24ad-41a0-8d24-25a8056c809f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.076827] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.077367] env[62816]: DEBUG nova.compute.provider_tree [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1806.088044] env[62816]: DEBUG nova.network.neutron [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "address": "fa:16:3e:ca:91:2e", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ce4cb59-c4", "ovs_interfaceid": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8c517e18-ce7f-4b4e-b7bb-375e247abd6c", "address": "fa:16:3e:df:31:b7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c517e18-ce", "ovs_interfaceid": "8c517e18-ce7f-4b4e-b7bb-375e247abd6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.098832] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b3c82c-bfdc-854f-0fbc-b49191d6dfbb, 'name': SearchDatastore_Task, 'duration_secs': 0.045655} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.099115] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.099378] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/c9ebcce1-8374-46fb-996f-c271cb8dbf84.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1806.099634] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d74f4fe-b27c-484f-93e7-b85a80fc8612 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.107242] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1806.107242] env[62816]: value = "task-1789028" [ 1806.107242] env[62816]: _type = "Task" [ 1806.107242] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.115153] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789028, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.497986] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 679cd9a3-2ed6-451f-b934-ba7738913959] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1806.581067] env[62816]: DEBUG nova.scheduler.client.report [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1806.590809] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.591490] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.591664] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.591983] env[62816]: DEBUG oslo_concurrency.lockutils [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.592216] env[62816]: DEBUG nova.network.neutron [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Refreshing network info cache for port 8c517e18-ce7f-4b4e-b7bb-375e247abd6c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1806.593944] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb42070-8096-48fb-b34f-f7f8c867bf28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.615567] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1806.615816] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1806.615975] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1806.616181] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1806.616375] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1806.616583] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1806.616743] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1806.617437] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1806.617437] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1806.617437] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1806.617437] env[62816]: DEBUG nova.virt.hardware [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1806.623945] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfiguring VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1806.627648] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a89d3d7c-1480-4f64-99aa-804445b610c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.645104] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789028, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.646401] env[62816]: DEBUG oslo_vmware.api [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1806.646401] env[62816]: value = "task-1789029" [ 1806.646401] env[62816]: _type = "Task" [ 1806.646401] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.654586] env[62816]: DEBUG oslo_vmware.api [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789029, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.001321] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: fb84cb48-d1a1-4eec-adb8-8edc585263df] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1807.029473] env[62816]: INFO nova.compute.manager [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Unrescuing [ 1807.029745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.029901] env[62816]: DEBUG oslo_concurrency.lockutils [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.030227] env[62816]: DEBUG nova.network.neutron [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1807.085645] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.775s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.088127] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.108s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.088381] env[62816]: DEBUG nova.objects.instance [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lazy-loading 'resources' on Instance uuid 913bba01-e64b-4b52-af94-5effcefc2677 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1807.108127] env[62816]: INFO nova.scheduler.client.report [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted allocations for instance a50b78c5-bb7e-4038-9a74-ecde2042828f [ 1807.129527] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': 
task-1789028, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.156529] env[62816]: DEBUG oslo_vmware.api [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789029, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.176655] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.176655] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.176655] env[62816]: DEBUG nova.network.neutron [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1807.370040] env[62816]: DEBUG nova.compute.manager [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-vif-plugged-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.370338] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.370577] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.370751] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.370951] env[62816]: DEBUG nova.compute.manager [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] No waiting events found dispatching 
network-vif-plugged-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1807.371452] env[62816]: WARNING nova.compute.manager [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received unexpected event network-vif-plugged-d0353b95-1d3d-4eab-9c03-374679fe2118 for instance with vm_state shelved_offloaded and task_state spawning. [ 1807.371646] env[62816]: DEBUG nova.compute.manager [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.371852] env[62816]: DEBUG nova.compute.manager [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing instance network info cache due to event network-changed-d0353b95-1d3d-4eab-9c03-374679fe2118. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1807.372142] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] Acquiring lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.402030] env[62816]: DEBUG nova.network.neutron [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updated VIF entry in instance network info cache for port 8c517e18-ce7f-4b4e-b7bb-375e247abd6c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1807.402576] env[62816]: DEBUG nova.network.neutron [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "address": "fa:16:3e:ca:91:2e", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ce4cb59-c4", "ovs_interfaceid": "2ce4cb59-c403-4e6e-8aa0-30efaa416644", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8c517e18-ce7f-4b4e-b7bb-375e247abd6c", "address": "fa:16:3e:df:31:b7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c517e18-ce", "ovs_interfaceid": "8c517e18-ce7f-4b4e-b7bb-375e247abd6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1807.504563] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f6ddaab3-d420-4ee4-bf75-486228826635] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1807.619491] env[62816]: DEBUG oslo_concurrency.lockutils [None req-130f3abc-1265-4582-be23-223379efa6a1 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "a50b78c5-bb7e-4038-9a74-ecde2042828f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.735s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.628228] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789028, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.166168} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.628488] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/c9ebcce1-8374-46fb-996f-c271cb8dbf84.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1807.628703] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1807.629077] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f90fda8a-67ac-4c62-8851-1b61c933e462 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.637580] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1807.637580] env[62816]: value = "task-1789030" [ 1807.637580] env[62816]: _type = "Task" [ 1807.637580] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.656499] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789030, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.664398] env[62816]: DEBUG oslo_vmware.api [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789029, 'name': ReconfigVM_Task, 'duration_secs': 0.710891} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.665344] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.665561] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfigured VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1807.782935] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78a43ea-79cc-40fe-a988-d4ff4b684cfa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.790667] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb48eb75-f9df-43ac-b1a5-8221aff79e24 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.823328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82415da-2b89-475e-892b-9be11a190b80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.832278] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f389f185-9e7c-46aa-8b85-9243482971a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.846636] env[62816]: DEBUG nova.compute.provider_tree [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.905152] env[62816]: DEBUG oslo_concurrency.lockutils [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.905596] env[62816]: DEBUG nova.compute.manager [req-8ac6ca9b-f6a9-45dd-a6d6-ce23c381047a 
req-e5c093d9-48df-47a7-8f1d-cfa8bfb4960b service nova] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Received event network-vif-deleted-1808e3a6-539d-4c7f-8c88-b2ed44ad1368 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1808.008632] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 11a4d835-c149-49f0-8e4f-b3f9a7f1afca] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1808.148660] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789030, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077467} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.148660] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1808.149269] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c78eba-5710-4d92-be9c-e097c2576da4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.173572] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/c9ebcce1-8374-46fb-996f-c271cb8dbf84.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1808.175354] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f7ba289b-b4e7-4fb1-b854-80fc5220973d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-8c517e18-ce7f-4b4e-b7bb-375e247abd6c" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.789s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.176338] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a6b3b93-f518-4664-8c63-0e706313b2b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.203345] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1808.203345] env[62816]: value = "task-1789031" [ 1808.203345] env[62816]: _type = "Task" [ 1808.203345] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.213934] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789031, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.250877] env[62816]: DEBUG nova.network.neutron [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.351963] env[62816]: DEBUG nova.scheduler.client.report [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1808.373378] env[62816]: DEBUG nova.network.neutron [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.512065] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0b10aca0-950b-46f6-8367-5cb9ea7540c8] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1808.714038] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789031, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.754197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.754929] env[62816]: DEBUG nova.objects.instance [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'flavor' on Instance uuid b9e8af08-9579-4dbf-8ea1-35ffab75e159 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.856939] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.860058] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.783s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.860357] env[62816]: DEBUG nova.objects.instance [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lazy-loading 'resources' on 
Instance uuid bd5482f1-8884-49fa-9e9c-7873eadeefe0 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1808.876103] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.878749] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] Acquired lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.879447] env[62816]: DEBUG nova.network.neutron [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Refreshing network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1808.882479] env[62816]: INFO nova.scheduler.client.report [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Deleted allocations for instance 913bba01-e64b-4b52-af94-5effcefc2677 [ 1808.899181] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4b3b553f127898e44d60c98c910878fc',container_format='bare',created_at=2024-12-12T02:57:06Z,direct_url=,disk_format='vmdk',id=74b5ba28-84d4-460e-9f4d-6cb94c84b4ea,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1409215635-shelved',owner='138797faa4144ecbad6956e126963199',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-12-12T02:57:22Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1808.899427] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1808.899583] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.899780] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1808.899906] 
env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.900099] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1808.900324] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1808.900482] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1808.900646] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1808.900807] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1808.900988] env[62816]: DEBUG nova.virt.hardware [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1808.902267] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc98125-a51c-4c7c-8340-9eb6773f0a15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.911652] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05d86335-5826-41d9-805d-47418df0517c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.926083] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:e4:26', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd0353b95-1d3d-4eab-9c03-374679fe2118', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1808.933813] env[62816]: DEBUG oslo.service.loopingcall [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1808.934811] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1808.935077] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac6d0fb4-0509-4939-9711-10ad02fe7f2e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.955556] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1808.955556] env[62816]: value = "task-1789032" [ 1808.955556] env[62816]: _type = "Task" [ 1808.955556] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.965958] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789032, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.014930] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 3fc6366b-76db-4bb1-ae1f-67b3ad5e1cbd] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1809.219452] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789031, 'name': ReconfigVM_Task, 'duration_secs': 0.553351} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.219820] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Reconfigured VM instance instance-0000005b to attach disk [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/c9ebcce1-8374-46fb-996f-c271cb8dbf84.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.221215] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d977742b-ce03-439d-9f99-5960011c4d2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.230369] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1809.230369] env[62816]: value = "task-1789033" [ 1809.230369] env[62816]: _type = "Task" [ 1809.230369] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.243216] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789033, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.266346] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc13966-c16d-4f01-95f8-f0c0d445db5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.270197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-2ce4cb59-c403-4e6e-8aa0-30efaa416644" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.270497] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-2ce4cb59-c403-4e6e-8aa0-30efaa416644" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.294971] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1809.296165] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69ef994b-228d-4cb1-890f-f58a795f4e0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.304349] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1809.304349] env[62816]: value = "task-1789034" [ 1809.304349] env[62816]: _type = "Task" [ 1809.304349] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.317307] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789034, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.391250] env[62816]: DEBUG oslo_concurrency.lockutils [None req-43045502-0c39-4ea3-9e09-a4c4173d36c1 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "913bba01-e64b-4b52-af94-5effcefc2677" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.066s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.467918] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789032, 'name': CreateVM_Task, 'duration_secs': 0.4415} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.474025] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1809.474025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.474025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.474025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1809.474025] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f12d10d-89ce-48ef-8cd2-9c8e90392fe8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.478097] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1809.478097] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526ecb7e-36c2-515d-ebe6-1129302ee9c7" [ 1809.478097] env[62816]: _type = "Task" [ 1809.478097] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.488233] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526ecb7e-36c2-515d-ebe6-1129302ee9c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.520983] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.521384] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances with incomplete migration {{(pid=62816) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1809.600847] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ada0d760-b819-4817-b5d6-832c0a6d3f1d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.611151] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96498fe5-4723-42db-bde4-c20322451b55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.647992] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01f9161-b21c-44b2-acc8-4405d6ad8ad3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.656400] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e15823-95b6-46eb-9f3d-fd4b86d1e828 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.671688] env[62816]: DEBUG nova.compute.provider_tree [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.689267] env[62816]: DEBUG nova.network.neutron [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updated VIF entry in instance network info cache for port d0353b95-1d3d-4eab-9c03-374679fe2118. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1809.689643] env[62816]: DEBUG nova.network.neutron [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [{"id": "d0353b95-1d3d-4eab-9c03-374679fe2118", "address": "fa:16:3e:67:e4:26", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0353b95-1d", "ovs_interfaceid": "d0353b95-1d3d-4eab-9c03-374679fe2118", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1809.743368] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789033, 'name': Rename_Task, 'duration_secs': 0.29206} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.743670] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1809.743916] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62632c29-9ce8-407e-8b46-a9d71400a1cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.751035] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1809.751035] env[62816]: value = "task-1789036" [ 1809.751035] env[62816]: _type = "Task" [ 1809.751035] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.760165] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789036, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.774576] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.774759] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.775658] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0a998b-f50b-4fa1-87d6-84ce60cbca58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.797039] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65089af-ecec-4765-8449-bc74c170d9d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.827540] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfiguring VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1809.831109] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecbb526b-28e6-4a98-b95a-486ad240afba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.850977] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789034, 'name': PowerOffVM_Task, 'duration_secs': 0.424696} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.852362] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.857681] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfiguring VM instance instance-0000004e to detach disk 2002 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1809.858031] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1809.858031] env[62816]: value = "task-1789037" [ 1809.858031] env[62816]: _type = "Task" [ 1809.858031] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.858236] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86b4c940-1ae5-4f94-ae76-b1304bf04282 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.880987] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.882284] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1809.882284] env[62816]: value = "task-1789038" [ 1809.882284] env[62816]: _type = "Task" [ 1809.882284] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.891480] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789038, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.988939] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.988939] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Processing image 74b5ba28-84d4-460e-9f4d-6cb94c84b4ea {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1809.988939] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.989602] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.989602] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1809.989782] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cff4967c-4bff-47db-a5dc-751cae73bf02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.007410] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1810.007627] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1810.008563] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e6ee0b0-4484-4dd0-bc0b-94cb25bdd888 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.015014] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1810.015014] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527ffdec-57fe-cc4b-26a9-45bd0885cc3b" [ 1810.015014] env[62816]: _type = "Task" [ 1810.015014] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.022707] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527ffdec-57fe-cc4b-26a9-45bd0885cc3b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.025274] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1810.116543] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5202a574-4959-9cb5-0cbd-7f0966868c29/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1810.117481] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6d2c83-9657-4beb-b2ed-0047eb5e9d98 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.123713] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5202a574-4959-9cb5-0cbd-7f0966868c29/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1810.123881] env[62816]: ERROR oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5202a574-4959-9cb5-0cbd-7f0966868c29/disk-0.vmdk due to incomplete transfer. 
[ 1810.124131] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4650ab83-831b-414d-8ef5-9f1aa173bcfc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.137936] env[62816]: DEBUG oslo_vmware.rw_handles [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5202a574-4959-9cb5-0cbd-7f0966868c29/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1810.138157] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Uploaded image 1d23fc8e-ee35-45ba-ad66-5e83269e15a0 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1810.140482] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1810.140735] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-39cf8980-bbff-486f-9d42-c2c359417307 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.146792] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1810.146792] env[62816]: value = "task-1789039" [ 1810.146792] env[62816]: _type = "Task" [ 1810.146792] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.154160] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789039, 'name': Destroy_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.177720] env[62816]: DEBUG nova.scheduler.client.report [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1810.192276] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9a302e7-7dfe-43bc-a1b0-ed0a62610ad2 req-7f192ce4-8d33-4fb3-99b9-8a5ae8a4b555 service nova] Releasing lock "refresh_cache-f97ea34e-792e-4023-bd2f-549dba129925" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.262484] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.382296] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.391560] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789038, 'name': ReconfigVM_Task, 'duration_secs': 0.343155} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.392089] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfigured VM instance instance-0000004e to detach disk 2002 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1810.392162] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1810.392483] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3c62a5a-1dd4-4fb4-afac-c5f481c1b1e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.400270] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1810.400270] env[62816]: value = "task-1789040" [ 1810.400270] env[62816]: _type = "Task" [ 1810.400270] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.409927] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789040, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.525053] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Preparing fetch location {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1810.525396] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Fetch image to [datastore1] OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b/OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b.vmdk {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1810.525396] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Downloading stream optimized image 74b5ba28-84d4-460e-9f4d-6cb94c84b4ea to [datastore1] OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b/OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b.vmdk on the data store datastore1 as vApp {{(pid=62816) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1810.525516] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Downloading image file data 74b5ba28-84d4-460e-9f4d-6cb94c84b4ea to the ESX as VM named 'OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b' {{(pid=62816) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1810.602044] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1810.602044] env[62816]: value = "resgroup-9" [ 1810.602044] env[62816]: _type = "ResourcePool" [ 1810.602044] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1810.602044] env[62816]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-850f177e-c766-4b70-8a4a-bf58f4b927b4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.622992] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease: (returnval){ [ 1810.622992] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525b9371-049c-de9c-be07-c89de9ec29ac" [ 1810.622992] env[62816]: _type = "HttpNfcLease" [ 1810.622992] env[62816]: } obtained for vApp import into resource pool (val){ [ 1810.622992] env[62816]: value = "resgroup-9" [ 1810.622992] env[62816]: _type = "ResourcePool" [ 1810.622992] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1810.623257] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the lease: (returnval){ [ 1810.623257] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525b9371-049c-de9c-be07-c89de9ec29ac" [ 1810.623257] env[62816]: _type = "HttpNfcLease" [ 1810.623257] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1810.629273] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1810.629273] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525b9371-049c-de9c-be07-c89de9ec29ac" [ 1810.629273] env[62816]: _type = "HttpNfcLease" [ 1810.629273] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1810.656669] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789039, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.684851] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.825s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.713649] env[62816]: INFO nova.scheduler.client.report [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Deleted allocations for instance bd5482f1-8884-49fa-9e9c-7873eadeefe0 [ 1810.762287] env[62816]: DEBUG oslo_vmware.api [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789036, 'name': PowerOnVM_Task, 'duration_secs': 0.67795} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.762638] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.762847] env[62816]: INFO nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Took 9.89 seconds to spawn the instance on the hypervisor. 
[ 1810.763044] env[62816]: DEBUG nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1810.763880] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c7036f-686b-4ef3-8401-229bc28344d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.808283] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.808513] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.883161] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.909179] env[62816]: DEBUG oslo_vmware.api [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789040, 'name': PowerOnVM_Task, 'duration_secs': 0.412097} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.909461] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1810.909693] env[62816]: DEBUG nova.compute.manager [None req-397d2304-042d-4f7d-8820-0631d1de2bc3 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1810.910508] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00670758-4a92-4c89-9ba3-c977384d3c91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.131738] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1811.131738] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525b9371-049c-de9c-be07-c89de9ec29ac" [ 1811.131738] env[62816]: _type = "HttpNfcLease" [ 1811.131738] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1811.155932] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789039, 'name': Destroy_Task, 'duration_secs': 0.648273} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.156220] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroyed the VM [ 1811.156507] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1811.156749] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-46a31e6a-a11e-4006-9f0e-fd82008ac923 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.162535] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1811.162535] env[62816]: value = "task-1789042" [ 1811.162535] env[62816]: _type = "Task" [ 1811.162535] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.169711] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789042, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.221855] env[62816]: DEBUG oslo_concurrency.lockutils [None req-46bf7fbc-6a2d-4890-bd1c-c36671b11c53 tempest-DeleteServersAdminTestJSON-2045734698 tempest-DeleteServersAdminTestJSON-2045734698-project-admin] Lock "bd5482f1-8884-49fa-9e9c-7873eadeefe0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.267s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.283168] env[62816]: INFO nova.compute.manager [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Took 14.81 seconds to build instance. [ 1811.311123] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1811.382728] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.633267] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1811.633267] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525b9371-049c-de9c-be07-c89de9ec29ac" [ 1811.633267] env[62816]: _type = "HttpNfcLease" [ 1811.633267] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1811.633609] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1811.633609] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525b9371-049c-de9c-be07-c89de9ec29ac" [ 1811.633609] env[62816]: _type = "HttpNfcLease" [ 1811.633609] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1811.634999] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e67cde00-9c8d-4fe2-8e1e-927fcfc1d7bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.642378] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a40b-ce21-46fb-a162-e25798fa90b0/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1811.642602] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a40b-ce21-46fb-a162-e25798fa90b0/disk-0.vmdk. {{(pid=62816) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1811.712746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.712859] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.720821] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-42dcd820-a3bc-415d-b9eb-3cf91a1ecc73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.727538] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789042, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.785116] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ed9d5763-d61b-4c86-b150-d7084946b187 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.329s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.835033] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.835033] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.835033] env[62816]: INFO nova.compute.claims [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1811.883714] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.215809] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1812.218922] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789042, 'name': RemoveSnapshot_Task} progress is 42%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.223219] env[62816]: DEBUG nova.compute.manager [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1812.223418] env[62816]: DEBUG nova.compute.manager [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing instance network info cache due to event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1812.223692] env[62816]: DEBUG oslo_concurrency.lockutils [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.223863] env[62816]: DEBUG oslo_concurrency.lockutils [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.224043] env[62816]: DEBUG nova.network.neutron [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1812.384496] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.405327] env[62816]: INFO nova.compute.manager [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Rescuing [ 1812.405611] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.405745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.405916] env[62816]: DEBUG nova.network.neutron [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1812.522708] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1812.714859] env[62816]: DEBUG oslo_vmware.api [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789042, 'name': RemoveSnapshot_Task, 'duration_secs': 1.099139} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.716930] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1812.717224] env[62816]: INFO nova.compute.manager [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 22.19 seconds to snapshot the instance on the hypervisor. [ 1812.745560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.887119] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.894213] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Completed reading data from the image iterator. {{(pid=62816) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1812.894446] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a40b-ce21-46fb-a162-e25798fa90b0/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1812.895626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b50f24-b275-419a-bc60-9008191d5eb8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.901735] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a40b-ce21-46fb-a162-e25798fa90b0/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1812.901911] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a40b-ce21-46fb-a162-e25798fa90b0/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1812.904566] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-a2570934-8d2e-44ce-a4b2-0312412f5287 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.032020] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a750afd8-e0ce-46fa-80ea-56f99c61dc7c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.032803] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_power_states {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1813.040247] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49f6959-af79-42ed-a5f9-cec183aa6ed2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.079069] env[62816]: DEBUG nova.network.neutron [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updated VIF entry in instance network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1813.079069] env[62816]: DEBUG nova.network.neutron [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.080249] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6122b3a3-6eee-4f82-a580-b7a092ea544a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.088775] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c10a1f-96df-473f-96c4-cddf6b93334a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.105853] env[62816]: DEBUG nova.compute.provider_tree [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.119675] env[62816]: DEBUG oslo_vmware.rw_handles [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b1a40b-ce21-46fb-a162-e25798fa90b0/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1813.119891] env[62816]: INFO nova.virt.vmwareapi.images [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Downloaded image file data 74b5ba28-84d4-460e-9f4d-6cb94c84b4ea [ 1813.121021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bfd2535-e150-4ee0-9fe1-fb2f9e0df55a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.139212] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e19f8f44-35e6-4b24-a25a-d52df4d55d3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.163892] env[62816]: INFO nova.virt.vmwareapi.images [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] The imported VM was unregistered [ 1813.166501] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Caching image {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1813.166765] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating directory with path [datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1813.167070] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da507766-f435-4231-a067-416a32c0db7f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.178176] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created directory with path [datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1813.178487] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b/OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b.vmdk to [datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk. {{(pid=62816) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1813.178647] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-300acac7-d28d-4a48-b21f-957899525d72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.185205] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1813.185205] env[62816]: value = "task-1789044" [ 1813.185205] env[62816]: _type = "Task" [ 1813.185205] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.193326] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.246560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.246922] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.247328] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.247472] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.247700] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.249894] env[62816]: INFO nova.compute.manager [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Terminating instance [ 1813.252124] env[62816]: DEBUG nova.compute.manager [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1813.252317] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1813.253165] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e07f37b3-36d5-4089-9e6f-aa8a44882eb2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.263122] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1813.264061] env[62816]: DEBUG nova.network.neutron [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, 
"qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1813.265105] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6587701-5ec6-42c2-a64f-17dd03ea8eb8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.272629] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1813.272629] env[62816]: value = "task-1789045" [ 1813.272629] env[62816]: _type = "Task" [ 1813.272629] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.283565] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789045, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.285841] env[62816]: DEBUG nova.compute.manager [None req-0eec4ad7-305a-40c5-95bb-675b91457b7c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Found 2 images (rotation: 2) {{(pid=62816) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1813.386585] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.539230] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Getting list of instances from cluster (obj){ [ 1813.539230] env[62816]: value = "domain-c8" [ 1813.539230] env[62816]: _type = "ClusterComputeResource" [ 1813.539230] env[62816]: } {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1813.540402] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990dcf8d-8eb3-44c7-b9fa-22c4df1b0aac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.559727] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Got total of 11 instances {{(pid=62816) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1813.559892] env[62816]: WARNING nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] While synchronizing instance power states, found 12 instances in the database and 11 instances on the hypervisor. 
[ 1813.560033] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid 8105e650-8482-40c6-bd7a-b8daea19a0d5 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.560303] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.560471] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.560627] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid c66fa160-d4dd-429f-8751-f36cb2020ff1 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561062] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid 543d69d2-0694-4d57-bbae-f8851ff0f0dc {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561062] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561193] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid 4ab07a21-2685-42bc-af13-b95473993d6f {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561272] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid b9e8af08-9579-4dbf-8ea1-35ffab75e159 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561414] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561578] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid fa719ff5-0219-485f-aac7-2cde4bbef8f6 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561729] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid c9ebcce1-8374-46fb-996f-c271cb8dbf84 {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.561872] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Triggering sync for uuid e26b6593-7e64-4a43-b09d-92d2e668c25b {{(pid=62816) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1813.562279] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.562686] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None 
None] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.562742] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.563052] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.563245] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.563455] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.563720] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.564042] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.564147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.564326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.564537] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "4ab07a21-2685-42bc-af13-b95473993d6f" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.564735] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "4ab07a21-2685-42bc-af13-b95473993d6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.564959] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.565147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.565435] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.565561] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.565777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.566007] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.566197] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.566367] env[62816]: INFO nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] During sync_power_state the instance has a pending task (rescuing). Skip. 
[ 1813.566532] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.566837] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.567630] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcfceec-a48f-4049-85a5-4985fd875213 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.571143] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c561f8e-50f8-478c-814f-061880b12526 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.574012] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb42823-dc30-4653-aa8c-66ff7526642e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.577317] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2feacd3b-2fac-4a23-a49f-24b200e0ae6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.580239] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7415166e-f554-4575-b983-ba14ccb693b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.582940] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86dd4147-2d43-4459-a4c3-e82601b441a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.585815] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c63915-78f6-4e55-bf04-8d52572c6c93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.588353] env[62816]: DEBUG oslo_concurrency.lockutils [req-12152ee3-2436-4c3c-9fe0-4a841aafed59 req-bacd8f68-a261-4d6e-adea-3c4f5b095188 service nova] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.609856] env[62816]: DEBUG nova.scheduler.client.report [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1813.696588] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.768412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.782952] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789045, 'name': PowerOffVM_Task, 'duration_secs': 0.291877} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.783270] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1813.783493] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1813.783800] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68250d88-061d-4cf8-b2ba-2fbc26767af2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.888150] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.932181] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1813.932468] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1813.932655] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Deleting the datastore file [datastore1] 8105e650-8482-40c6-bd7a-b8daea19a0d5 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1813.932932] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16dd6641-b63d-4f90-a9c2-021d766378eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.941371] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for the task: (returnval){ [ 1813.941371] env[62816]: value = "task-1789047" [ 1813.941371] env[62816]: _type = "Task" [ 1813.941371] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.952262] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789047, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.109608] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.544s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.110009] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "4ab07a21-2685-42bc-af13-b95473993d6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.545s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.110511] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.110858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.111443] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.549s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.114272] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.114778] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1814.117652] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.372s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.118998] env[62816]: INFO nova.compute.claims [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1814.121998] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.558s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.122188] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.558s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.196992] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.304927] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1814.305428] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-000282f4-03d6-430b-b76d-a3b404ddf532 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.315367] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1814.315367] env[62816]: value = "task-1789048" [ 1814.315367] env[62816]: _type = "Task" [ 1814.315367] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.324505] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789048, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.371535] env[62816]: DEBUG nova.compute.manager [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1814.372331] env[62816]: DEBUG nova.compute.manager [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing instance network info cache due to event network-changed-3f45a830-39df-4031-a603-7b72a5562ec6. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1814.372443] env[62816]: DEBUG oslo_concurrency.lockutils [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] Acquiring lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1814.372630] env[62816]: DEBUG oslo_concurrency.lockutils [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] Acquired lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1814.372812] env[62816]: DEBUG nova.network.neutron [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Refreshing network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1814.391582] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.451382] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.623337] env[62816]: DEBUG nova.compute.utils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1814.626606] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1814.626807] env[62816]: DEBUG nova.network.neutron [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1814.668699] env[62816]: DEBUG nova.policy [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0a2129bc83a45d695730796b55f1caf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72d49b085afa4df99700ea4e15e9c87e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1814.695946] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.827495] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.888343] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.897025] env[62816]: DEBUG nova.compute.manager [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1814.898314] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8220df80-301a-45e7-8a1b-aceaef0a7144 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.953876] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789047, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.030963] env[62816]: DEBUG nova.network.neutron [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Successfully created port: 464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1815.130029] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1815.199029] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.309536] env[62816]: DEBUG nova.network.neutron [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updated VIF entry in instance network info cache for port 3f45a830-39df-4031-a603-7b72a5562ec6. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1815.309973] env[62816]: DEBUG nova.network.neutron [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [{"id": "3f45a830-39df-4031-a603-7b72a5562ec6", "address": "fa:16:3e:71:2b:cf", "network": {"id": "c4f8816b-f710-4808-8345-cab5c5344057", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1203956084-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12767255c02a4e16ad13383fdb725593", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f45a830-39", "ovs_interfaceid": "3f45a830-39df-4031-a603-7b72a5562ec6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1815.331016] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789048, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.332672] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac337cc-5d2d-4264-8872-3e8a8d40f916 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.339604] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114e0594-ed83-4c2c-9955-0e8a3b536dbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.369973] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91de9c3-462c-4735-b646-75f09adef095 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.380657] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97788783-b106-4690-a9ed-3cb39d976cc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.398027] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.403436] env[62816]: DEBUG nova.compute.provider_tree [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1815.408181] env[62816]: INFO nova.compute.manager [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] instance snapshotting [ 1815.409029] env[62816]: DEBUG nova.objects.instance [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'flavor' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1815.456205] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.697075] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 71%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.813440] env[62816]: DEBUG oslo_concurrency.lockutils [req-1d337a6b-835a-48e2-8a39-3d365ea1a87c req-112d788d-bdb2-4e4f-87e3-728677654861 service nova] Releasing lock "refresh_cache-b9e8af08-9579-4dbf-8ea1-35ffab75e159" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.832064] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789048, 'name': PowerOffVM_Task, 'duration_secs': 1.373643} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.832453] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1815.833344] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d1be07d-1453-474d-b2fc-e3f8e70d9031 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.855238] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7f027c-4639-41da-b753-6caa98bd96e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.890701] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1815.890966] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-888b0db6-9ae6-4f59-8532-d1bb4f45ef21 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.897836] env[62816]: DEBUG oslo_vmware.api [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789037, 'name': ReconfigVM_Task, 'duration_secs': 5.963452} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.898973] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.899205] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfigured VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1815.901203] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1815.901203] env[62816]: value = "task-1789049" [ 1815.901203] env[62816]: _type = "Task" [ 1815.901203] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.901464] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "dd833e38-691c-4757-9c6b-659c74343d3e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.338s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1815.902239] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b809f48-755c-4555-be31-81242ea490a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.907884] env[62816]: DEBUG nova.scheduler.client.report [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1815.915920] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96e5064f-ae5a-4475-a428-c8c049c4058a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.924710] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1815.924710] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 
tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1815.924869] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.925018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.925357] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1815.945118] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c362d17-f91e-4013-a3a5-3db87c41d4f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.956746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67a79f1-aae2-4a98-b9d2-10def09bfb9e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.975753] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.978754] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1815.979057] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1815.979843] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42c220e8-8617-4a86-802f-957602050e4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.986759] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1815.986759] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a91263-ceef-0508-f081-85bf29f95925" [ 1815.986759] env[62816]: _type = "Task" [ 1815.986759] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.995421] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a91263-ceef-0508-f081-85bf29f95925, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.144858] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1816.168033] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1816.168033] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1816.168033] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1816.168033] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf 
tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1816.168033] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1816.168287] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1816.168414] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1816.168590] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1816.168812] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1816.168923] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1816.169145] env[62816]: DEBUG nova.virt.hardware [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1816.170015] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da009f9f-6101-443b-8059-7372b9557629 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.175676] env[62816]: DEBUG nova.compute.manager [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-vif-deleted-2ce4cb59-c403-4e6e-8aa0-30efaa416644 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1816.175676] env[62816]: INFO nova.compute.manager [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Neutron deleted interface 2ce4cb59-c403-4e6e-8aa0-30efaa416644; detaching 
it from the instance and deleting it from the info cache [ 1816.175816] env[62816]: DEBUG nova.network.neutron [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8c517e18-ce7f-4b4e-b7bb-375e247abd6c", "address": "fa:16:3e:df:31:b7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c517e18-ce", "ovs_interfaceid": "8c517e18-ce7f-4b4e-b7bb-375e247abd6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.183305] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390a8f2a-dd7e-403f-875d-7d8e6504821e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.205739] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 85%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.416515] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.417221] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1816.424222] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "dd833e38-691c-4757-9c6b-659c74343d3e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.523s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.462395] env[62816]: DEBUG oslo_vmware.api [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Task: {'id': task-1789047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.51485} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.463531] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1816.463774] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1816.464058] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1816.464286] env[62816]: INFO nova.compute.manager [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Took 3.21 seconds to destroy the instance on the hypervisor. [ 1816.464683] env[62816]: DEBUG oslo.service.loopingcall [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1816.465241] env[62816]: DEBUG nova.compute.manager [-] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1816.465347] env[62816]: DEBUG nova.network.neutron [-] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1816.477800] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1816.478138] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-067bced0-5bd8-49aa-9941-31ac41140b27 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.484597] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1816.484597] env[62816]: value = "task-1789050" [ 1816.484597] env[62816]: _type = "Task" [ 1816.484597] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.497245] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.501162] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a91263-ceef-0508-f081-85bf29f95925, 'name': SearchDatastore_Task, 'duration_secs': 0.153874} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.501978] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ed0b747-f0c3-439f-90c5-ed956c09782b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.507768] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1816.507768] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5247df6e-11f0-0764-16f0-c43a99ccf200" [ 1816.507768] env[62816]: _type = "Task" [ 1816.507768] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.515425] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5247df6e-11f0-0764-16f0-c43a99ccf200, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.678927] env[62816]: DEBUG oslo_concurrency.lockutils [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1816.679154] env[62816]: DEBUG oslo_concurrency.lockutils [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] Acquired lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1816.679989] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753376db-144d-46a4-88c0-27e6608ba43e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.699276] env[62816]: DEBUG oslo_concurrency.lockutils [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] Releasing lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.699555] env[62816]: WARNING nova.compute.manager [req-3824ef1a-59b6-4941-9c80-89733cd78bf1 req-00eab96c-86ff-4321-b6c2-445b2db52080 service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Detach interface failed, port_id=2ce4cb59-c403-4e6e-8aa0-30efaa416644, reason: No device with interface-id 2ce4cb59-c403-4e6e-8aa0-30efaa416644 exists on VM: nova.exception.NotFound: No device with interface-id 2ce4cb59-c403-4e6e-8aa0-30efaa416644 exists on VM [ 1816.707425] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.922528] env[62816]: DEBUG nova.compute.utils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1816.924019] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1816.924214] env[62816]: DEBUG nova.network.neutron [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1816.994856] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.012382] env[62816]: DEBUG nova.policy [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da85accbf2ae484aafdf85030398de3e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e34ce2b6acac4ef08fd6b7d37dabef09', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1817.019016] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5247df6e-11f0-0764-16f0-c43a99ccf200, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.084256] env[62816]: DEBUG nova.network.neutron [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Successfully updated port: 464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1817.129608] env[62816]: DEBUG nova.compute.manager [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-vif-deleted-8c517e18-ce7f-4b4e-b7bb-375e247abd6c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1817.129794] env[62816]: INFO nova.compute.manager [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Neutron deleted interface 8c517e18-ce7f-4b4e-b7bb-375e247abd6c; detaching it from the instance and deleting it from the info cache [ 1817.130069] env[62816]: DEBUG nova.network.neutron [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.150600] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.150795] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.151012] 
env[62816]: DEBUG nova.network.neutron [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1817.215677] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.304924] env[62816]: DEBUG nova.network.neutron [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Successfully created port: 99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1817.427354] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1817.495189] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.516956] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5247df6e-11f0-0764-16f0-c43a99ccf200, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.587210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.587359] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.587566] env[62816]: DEBUG nova.network.neutron [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1817.634010] env[62816]: DEBUG oslo_concurrency.lockutils [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.634088] env[62816]: DEBUG oslo_concurrency.lockutils [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Acquired lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.634930] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0d1d08-b9a3-4868-b0b7-9fec07c6d90d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.656676] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62796197-0bba-4b48-9fe4-f5745480cbf6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.682704] env[62816]: DEBUG nova.virt.vmwareapi.vmops [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfiguring VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1817.682992] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36e14605-fd3f-49f5-ad1d-713b95bc6a30 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.697665] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.704404] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd 
req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Waiting for the task: (returnval){ [ 1817.704404] env[62816]: value = "task-1789051" [ 1817.704404] env[62816]: _type = "Task" [ 1817.704404] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.710505] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.716385] env[62816]: DEBUG nova.network.neutron [-] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.926473] env[62816]: DEBUG nova.network.neutron [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [{"id": "bed0373b-9c6a-4357-a640-8218a972cb72", "address": "fa:16:3e:9b:90:47", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbed0373b-9c", "ovs_interfaceid": "bed0373b-9c6a-4357-a640-8218a972cb72", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1817.995907] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.017726] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5247df6e-11f0-0764-16f0-c43a99ccf200, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.128978] env[62816]: DEBUG nova.network.neutron [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1818.198277] env[62816]: DEBUG nova.compute.manager [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Received event network-vif-plugged-464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1818.198495] env[62816]: DEBUG oslo_concurrency.lockutils [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.198698] env[62816]: DEBUG oslo_concurrency.lockutils [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.198867] env[62816]: DEBUG oslo_concurrency.lockutils [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.199053] env[62816]: DEBUG nova.compute.manager [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] No waiting events found dispatching network-vif-plugged-464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1818.199222] env[62816]: WARNING nova.compute.manager [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Received unexpected event network-vif-plugged-464c5ce0-30b5-473d-910e-343ba514ffa7 for instance with vm_state building and task_state spawning. [ 1818.199417] env[62816]: DEBUG nova.compute.manager [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Received event network-changed-464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1818.199555] env[62816]: DEBUG nova.compute.manager [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Refreshing instance network info cache due to event network-changed-464c5ce0-30b5-473d-910e-343ba514ffa7. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1818.199672] env[62816]: DEBUG oslo_concurrency.lockutils [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] Acquiring lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.211085] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.215827] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.218295] env[62816]: INFO nova.compute.manager [-] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Took 1.75 seconds to deallocate network for instance. [ 1818.301535] env[62816]: DEBUG nova.network.neutron [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating instance_info_cache with network_info: [{"id": "464c5ce0-30b5-473d-910e-343ba514ffa7", "address": "fa:16:3e:46:53:f1", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c5ce0-30", "ovs_interfaceid": "464c5ce0-30b5-473d-910e-343ba514ffa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.429633] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.436167] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1818.461879] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1818.462148] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1818.462311] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1818.462499] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1818.462649] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1818.462796] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1818.463013] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1818.463181] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1818.463349] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1818.463510] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1818.463687] env[62816]: DEBUG nova.virt.hardware [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1818.464550] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fec53494-0685-4c82-94ed-130c55474051 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.473150] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f533e826-0e5c-4d9f-9933-248902439d32 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.494908] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.519020] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5247df6e-11f0-0764-16f0-c43a99ccf200, 'name': SearchDatastore_Task, 'duration_secs': 1.747731} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.519020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.519216] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. 
{{(pid=62816) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1818.519468] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8992c3b3-880d-4fbf-9983-603474404865 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.525755] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1818.525755] env[62816]: value = "task-1789052" [ 1818.525755] env[62816]: _type = "Task" [ 1818.525755] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.532781] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.715431] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789044, 'name': MoveVirtualDisk_Task, 'duration_secs': 5.209557} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.719017] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b/OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b.vmdk to [datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk. [ 1818.719285] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Cleaning up location [datastore1] OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1818.719506] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_16063dc8-89e8-410e-8ecd-47be0239f61b {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1818.720520] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.720520] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e90b50c-b655-45dd-a0b9-8ea2c68cd1ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.727348] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1818.727348] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1818.727348] env[62816]: DEBUG nova.objects.instance [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lazy-loading 'resources' on Instance uuid 8105e650-8482-40c6-bd7a-b8daea19a0d5 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1818.729113] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1818.729113] env[62816]: value = "task-1789053" [ 1818.729113] env[62816]: _type = "Task" [ 1818.729113] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.738370] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789053, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.800495] env[62816]: DEBUG nova.network.neutron [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Successfully updated port: 99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1818.804239] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.804555] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance network_info: |[{"id": "464c5ce0-30b5-473d-910e-343ba514ffa7", "address": "fa:16:3e:46:53:f1", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c5ce0-30", "ovs_interfaceid": "464c5ce0-30b5-473d-910e-343ba514ffa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1818.804893] env[62816]: DEBUG oslo_concurrency.lockutils [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] Acquired lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.805519] env[62816]: DEBUG nova.network.neutron [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Refreshing network info cache for port 464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1818.806257] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:53:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '464c5ce0-30b5-473d-910e-343ba514ffa7', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1818.813952] env[62816]: DEBUG oslo.service.loopingcall [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.818063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "refresh_cache-642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.818224] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired lock "refresh_cache-642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.818369] env[62816]: DEBUG nova.network.neutron [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1818.819292] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1818.819900] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-631e0e1c-5aec-43b1-9a3f-d84b0f088f1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.840055] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1818.840055] env[62816]: value = "task-1789054" [ 1818.840055] env[62816]: _type = "Task" [ 1818.840055] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.848225] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789054, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.934756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb6ed978-8afd-42ff-af8f-1b570700f66d tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-dd833e38-691c-4757-9c6b-659c74343d3e-2ce4cb59-c403-4e6e-8aa0-30efaa416644" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.663s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.000802] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.039532] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789052, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.115783] env[62816]: DEBUG nova.network.neutron [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updated VIF entry in instance network info cache for port 464c5ce0-30b5-473d-910e-343ba514ffa7. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1819.116240] env[62816]: DEBUG nova.network.neutron [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating instance_info_cache with network_info: [{"id": "464c5ce0-30b5-473d-910e-343ba514ffa7", "address": "fa:16:3e:46:53:f1", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c5ce0-30", "ovs_interfaceid": "464c5ce0-30b5-473d-910e-343ba514ffa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.161302] env[62816]: DEBUG nova.compute.manager [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Received event network-vif-plugged-99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1819.161526] env[62816]: DEBUG oslo_concurrency.lockutils [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] Acquiring lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.161734] env[62816]: DEBUG oslo_concurrency.lockutils [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.161910] env[62816]: DEBUG oslo_concurrency.lockutils [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.164272] env[62816]: DEBUG nova.compute.manager [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] No waiting events found dispatching network-vif-plugged-99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1819.164526] env[62816]: WARNING nova.compute.manager [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Received unexpected event network-vif-plugged-99350a4f-7d49-4b0d-be97-689bb89aba9c for instance with vm_state building and task_state spawning. [ 1819.164710] env[62816]: DEBUG nova.compute.manager [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Received event network-changed-99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1819.164872] env[62816]: DEBUG nova.compute.manager [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Refreshing instance network info cache due to event network-changed-99350a4f-7d49-4b0d-be97-689bb89aba9c. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1819.165058] env[62816]: DEBUG oslo_concurrency.lockutils [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] Acquiring lock "refresh_cache-642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.215361] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.242999] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789053, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.358358] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789054, 'name': CreateVM_Task, 'duration_secs': 0.365903} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.358837] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1819.359388] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.359601] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.359952] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1819.360343] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83157d15-9f38-4239-992d-2485ef976262 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.365438] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1819.365438] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b47fe7-dca7-184b-082c-e97e132c7923" [ 1819.365438] env[62816]: _type = "Task" [ 1819.365438] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.369324] env[62816]: DEBUG nova.network.neutron [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1819.376242] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b47fe7-dca7-184b-082c-e97e132c7923, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.498961] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.535723] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789052, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.574028] env[62816]: DEBUG nova.network.neutron [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Updating instance_info_cache with network_info: [{"id": "99350a4f-7d49-4b0d-be97-689bb89aba9c", "address": "fa:16:3e:cb:54:1f", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99350a4f-7d", "ovs_interfaceid": "99350a4f-7d49-4b0d-be97-689bb89aba9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.601830] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48f8851-a1b0-44a0-ac52-f30aa4783e36 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.609937] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b6d815-7471-4da7-9c7f-156ec40c481f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.640437] env[62816]: DEBUG oslo_concurrency.lockutils [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] Releasing lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.640689] env[62816]: DEBUG nova.compute.manager [req-23c39f60-ebc4-4c06-bd60-e4f851d3b3d0 req-b928a8ed-c6c6-4dbd-944a-0d2e49ab96a9 service nova] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] Received event network-vif-deleted-a4838985-0fbb-4554-a869-57339a03546a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1819.642033] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c676d3fb-9da7-4ae3-82ca-d10e7d7f9761 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.649204] env[62816]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24af6e1-d85c-42e9-a763-9e72611771c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.662174] env[62816]: DEBUG nova.compute.provider_tree [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1819.715877] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.743728] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789053, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.663461} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.743955] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1819.744104] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.744357] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk to [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1819.744623] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c98e9078-0ef9-4141-9e09-c21472078b02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.750984] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1819.750984] env[62816]: value = "task-1789055" [ 1819.750984] env[62816]: _type = "Task" [ 1819.750984] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.758496] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.875770] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b47fe7-dca7-184b-082c-e97e132c7923, 'name': SearchDatastore_Task, 'duration_secs': 0.190034} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.876096] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.876339] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1819.876630] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.876787] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.876971] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1819.877251] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19c7081b-a3ca-4d77-b1b8-bc1e15418b5e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.885928] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1819.886118] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1819.886814] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9340932b-e71b-4686-9536-6db3f8c44954 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.891514] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1819.891514] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]524f9fd4-6747-b847-179a-b28a05fd7d8c" [ 1819.891514] env[62816]: _type = "Task" [ 1819.891514] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.898568] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524f9fd4-6747-b847-179a-b28a05fd7d8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.998290] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.037054] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789052, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.375263} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.037054] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk. 
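The block above is the rescue-disk preparation: the cached image vmdk is located with a SearchDatastore_Task, CopyVirtualDisk_Task is submitted through the VirtualDiskManager, and oslo_vmware.api polls the task (the "progress is N%" lines) until it completes. Below is a minimal sketch of that submit-and-poll pattern using oslo.vmware directly; the vCenter host, credentials, datacenter lookup and datastore paths are placeholders, not values taken from this log.

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials for the sketch.
    session = api.VMwareAPISession(
        'vc.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Pick a datacenter managed object reference (first one, for brevity).
    result = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'Datacenter', 100)
    dc_ref = result.objects[0].obj

    # Submit the virtual disk copy as a vCenter task.
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName='[datastore1] devstack-image-cache_base/src.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] some-instance/dest.vmdk',
        destDatacenter=dc_ref)

    # Block until the task finishes; the periodic progress polling seen in
    # the log comes from this wait loop, and errors are raised as exceptions.
    session.wait_for_task(task)

In Nova itself this path runs through nova.virt.vmwareapi.ds_util.disk_copy and oslo_vmware.api's wait_for_task/_poll_task, which is where the CopyVirtualDisk_Task progress records above and below originate.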
[ 1820.037448] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dadc31b-f6ed-4ebd-9556-838958dae453 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.062392] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1820.062715] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-842dd4aa-ea7c-497a-9bc0-3854f6a5cf31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.077417] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Releasing lock "refresh_cache-642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.077739] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Instance network_info: |[{"id": "99350a4f-7d49-4b0d-be97-689bb89aba9c", "address": "fa:16:3e:cb:54:1f", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99350a4f-7d", "ovs_interfaceid": "99350a4f-7d49-4b0d-be97-689bb89aba9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1820.078070] env[62816]: DEBUG oslo_concurrency.lockutils [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] Acquired lock "refresh_cache-642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.078291] env[62816]: DEBUG nova.network.neutron [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Refreshing network info cache for port 
99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1820.079496] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:54:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8039f411-8c97-48fe-a5a9-9f5a42e4e7c6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99350a4f-7d49-4b0d-be97-689bb89aba9c', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1820.087228] env[62816]: DEBUG oslo.service.loopingcall [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1820.088722] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1820.089057] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-850d3cd5-254f-43a7-aec5-6382ed04a12f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.108057] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1820.108057] env[62816]: value = "task-1789056" [ 1820.108057] env[62816]: _type = "Task" [ 1820.108057] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.113115] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1820.113115] env[62816]: value = "task-1789057" [ 1820.113115] env[62816]: _type = "Task" [ 1820.113115] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.120796] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.125697] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.164872] env[62816]: DEBUG nova.scheduler.client.report [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1820.216719] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.260683] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.402272] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]524f9fd4-6747-b847-179a-b28a05fd7d8c, 'name': SearchDatastore_Task, 'duration_secs': 0.012657} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.403111] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f25cae43-6242-4b51-9c76-6a2390da32f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.408393] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1820.408393] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5262afbc-627d-0187-93ec-c07af631b37d" [ 1820.408393] env[62816]: _type = "Task" [ 1820.408393] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.416508] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5262afbc-627d-0187-93ec-c07af631b37d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.498066] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789050, 'name': CreateSnapshot_Task, 'duration_secs': 3.526879} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.498549] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1820.499424] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8e88ab0-07c6-4ab9-a414-3f14a97d4d93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.621950] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.625144] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.670027] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.701753] env[62816]: INFO nova.scheduler.client.report [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Deleted allocations for instance 8105e650-8482-40c6-bd7a-b8daea19a0d5 [ 1820.719622] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.763520] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.855123] env[62816]: DEBUG nova.network.neutron [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Updated VIF entry in instance network info cache for port 99350a4f-7d49-4b0d-be97-689bb89aba9c. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1820.855580] env[62816]: DEBUG nova.network.neutron [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Updating instance_info_cache with network_info: [{"id": "99350a4f-7d49-4b0d-be97-689bb89aba9c", "address": "fa:16:3e:cb:54:1f", "network": {"id": "6cda6284-d326-430d-a483-d8ddbf5d3248", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.80", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "b785e717cfe540028c6aa1636fe2ce35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8039f411-8c97-48fe-a5a9-9f5a42e4e7c6", "external-id": "nsx-vlan-transportzone-12", "segmentation_id": 12, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99350a4f-7d", "ovs_interfaceid": "99350a4f-7d49-4b0d-be97-689bb89aba9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.921077] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5262afbc-627d-0187-93ec-c07af631b37d, 'name': SearchDatastore_Task, 'duration_secs': 0.131781} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.921077] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.921313] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1820.921575] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5f632bba-4966-4c26-a5cc-9e26cb80cac6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.673853] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.680078] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1821.683244] env[62816]: DEBUG oslo_concurrency.lockutils [req-e4305567-2fea-4775-bfa1-64624901aee2 req-599d1ccd-0926-438b-96b7-00e756f14d20 service nova] Releasing lock "refresh_cache-642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.688046] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1a2fb791-7a69-4b12-9fce-8b1c5d821e52 tempest-VolumesAdminNegativeTest-1786328548 tempest-VolumesAdminNegativeTest-1786328548-project-member] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.438s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.688046] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1821.688046] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1821.688046] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f141f95d-0555-4f75-b628-be931bc2cd9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.701058] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1821.701058] env[62816]: value = "task-1789058" [ 1821.701058] env[62816]: _type = "Task" [ 1821.701058] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.701312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.139s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.701506] env[62816]: INFO nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 8105e650-8482-40c6-bd7a-b8daea19a0d5] During sync_power_state the instance has a pending task (deleting). Skip. [ 1821.701677] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "8105e650-8482-40c6-bd7a-b8daea19a0d5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.718989] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.719300] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.719580] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1821.719580] env[62816]: value = "task-1789059" [ 1821.719580] env[62816]: _type = "Task" [ 1821.719580] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.719766] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.719972] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.727590] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.733457] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 11%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.199088] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.207919] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.208600] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.208827] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.218492] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.231549] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 34%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.480261] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.480574] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.480739] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.702347] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.702347] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.706516] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.710812] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.719545] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.731253] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.205015] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.205394] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.209251] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.214487] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.221391] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.231741] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.455908] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1823.456106] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1823.703033] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.703033] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.705770] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.709740] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.721488] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.731346] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.203706] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.203920] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.207156] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.210966] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.221266] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.232236] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.705133] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.715129] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.715624] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.721324] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.726459] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.734855] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.208250] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.208549] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.208757] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.213867] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.227537] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.235770] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.468207] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.468401] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.468565] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1825.702135] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.708433] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.712681] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.715380] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.723919] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.734937] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.203642] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.203962] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.209572] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.213955] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.223857] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.235154] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.676434] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.705124] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.705443] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.711065] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.715450] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.724301] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.735275] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.179336] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.179564] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1827.179798] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.179966] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.180137] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1827.206267] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.206540] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.214009] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.219824] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.229872] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.238331] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.682996] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.683218] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.683389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1827.683587] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1827.684471] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471433a6-58c4-4f2d-a50a-5cba07628b56 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.698574] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4dfb256-1601-4207-b46d-d9f7bbdde147 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.708295] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.720787] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.724202] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc7cdf4-0df0-45db-97a6-1b1262cfd5c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.726431] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789056, 'name': ReconfigVM_Task, 'duration_secs': 7.228609} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.729551] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Reconfigured VM instance instance-0000005b to attach disk [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84/844838ed-b150-482e-a0f6-dcce37470b52-rescue.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1827.731058] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11dc8ff-963b-439e-8ad6-bb12ae54cde9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.740079] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.743910] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62fafbf8-ed0b-41f4-88a4-db910d79ddb8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.746728] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.769520] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8fc3353b-8bc1-4303-8bda-4bf7480c8c7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.779098] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.803353] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179897MB free_disk=160GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1827.803481] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.803671] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.810595] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1827.810595] env[62816]: value = "task-1789060" [ 1827.810595] env[62816]: _type = "Task" [ 1827.810595] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.817798] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.204707] env[62816]: DEBUG oslo_vmware.api [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Task: {'id': task-1789051, 'name': ReconfigVM_Task, 'duration_secs': 10.409175} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.207604] env[62816]: DEBUG oslo_concurrency.lockutils [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] Releasing lock "dd833e38-691c-4757-9c6b-659c74343d3e" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.207893] env[62816]: DEBUG nova.virt.vmwareapi.vmops [req-85d123e7-57d5-48f4-9386-d06eec0aa4dd req-543404ba-5af3-4f93-bb57-8a3d3153dbdd service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Reconfigured VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1828.208348] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.208605] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "dd833e38-691c-4757-9c6b-659c74343d3e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 10.511s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.208826] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.209037] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.209282] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "dd833e38-691c-4757-9c6b-659c74343d3e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.213567] env[62816]: INFO nova.compute.manager [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Terminating instance [ 1828.215448] env[62816]: DEBUG nova.compute.manager [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1828.215648] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1828.216359] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d95940d-dc9c-413c-abe9-5136cd89c087 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.221589] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.225409] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1828.227990] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-87b5f540-1d33-4a1b-87a0-3b133b8c0029 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.237358] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1828.237358] env[62816]: value = "task-1789061" [ 1828.237358] env[62816]: _type = "Task" [ 1828.237358] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.237592] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.244238] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.248966] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.321945] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.702566] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.719384] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.732795] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.740064] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.746840] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789061, 'name': PowerOffVM_Task, 'duration_secs': 0.234479} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.747111] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1828.747286] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1828.747511] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3fd301e-7590-4e60-8d69-bdccc730ad0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.825522] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.837451] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 9745413b-2bd8-45d7-8491-483e4921b59c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.837624] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance dd833e38-691c-4757-9c6b-659c74343d3e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.837748] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c66fa160-d4dd-429f-8751-f36cb2020ff1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.837866] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 543d69d2-0694-4d57-bbae-f8851ff0f0dc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.837982] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance d03ed540-5c20-4bcb-ac7e-eec8c09e4451 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838118] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4ab07a21-2685-42bc-af13-b95473993d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838233] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b9e8af08-9579-4dbf-8ea1-35ffab75e159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838357] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance fa719ff5-0219-485f-aac7-2cde4bbef8f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838509] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance f97ea34e-792e-4023-bd2f-549dba129925 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838620] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c9ebcce1-8374-46fb-996f-c271cb8dbf84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838732] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance e26b6593-7e64-4a43-b09d-92d2e668c25b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.838842] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1828.839051] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1828.839190] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2816MB phys_disk=200GB used_disk=12GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1828.966603] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeee459d-4cdd-4c0c-b4ce-64bda0e0f7f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.974575] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e59547e-5331-4e5b-bbc9-2153476092fe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.004385] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8149f9-0e7a-4492-a20a-e2cae4ec14ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.011323] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86cc2430-be90-4321-9fcf-2f91e5ba46f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.024517] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1829.192640] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1829.192869] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1829.193066] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleting the 
datastore file [datastore1] dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1829.193333] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bbf986f3-7ee0-40bf-894d-da7c0542a2d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.206127] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.207130] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1829.207130] env[62816]: value = "task-1789063" [ 1829.207130] env[62816]: _type = "Task" [ 1829.207130] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.214524] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.225106] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.233275] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.239980] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.323723] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.528178] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1829.705633] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.715538] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.722686] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.734787] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.742035] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.822497] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.033691] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1830.033902] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.230s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.204653] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.216513] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.223719] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.235146] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.242077] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.323577] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.706742] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.717474] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.724995] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.735462] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.742759] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.824125] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.205921] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.218016] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.225903] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.236701] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.244515] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.324603] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.676244] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.676373] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.707389] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789057, 'name': CreateVM_Task, 'duration_secs': 11.10878} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.708150] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1831.708426] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.708655] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.709039] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1831.709345] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7630f399-7e03-49b6-acf8-d865b20af942 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.719803] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 
tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.720377] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1831.720377] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529ec3bb-d589-dbc0-0e1b-c52f577548b3" [ 1831.720377] env[62816]: _type = "Task" [ 1831.720377] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.730248] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ec3bb-d589-dbc0-0e1b-c52f577548b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.735318] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.742059] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.746535] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.825334] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.180347] env[62816]: DEBUG nova.compute.utils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1832.220689] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.230567] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.235529] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ec3bb-d589-dbc0-0e1b-c52f577548b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.242233] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.246718] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.325629] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.683824] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.720702] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.734235] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789055, 'name': CopyVirtualDisk_Task, 'duration_secs': 12.949463} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.734465] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529ec3bb-d589-dbc0-0e1b-c52f577548b3, 'name': SearchDatastore_Task, 'duration_secs': 0.976137} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.737144] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea/74b5ba28-84d4-460e-9f4d-6cb94c84b4ea.vmdk to [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1832.737495] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.737720] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1832.737945] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.738103] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.738279] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1832.739018] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03f9194-7ade-4808-80d0-6b680d051a80 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.741373] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory 
with opID=oslo.vmware-ffe9eac5-c3d0-4c17-bc60-46e7ea7e518a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.750408] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.768918] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1832.772835] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9420d02-4ca7-450c-b2b2-f88b0ba946e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.786840] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.787014] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1832.787182] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1832.787917] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abf4c64c-11b8-49eb-a004-dee5ed664a53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.794236] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1832.794236] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52bf92e7-fd14-80cf-70ce-a496414efea2" [ 1832.794236] env[62816]: _type = "Task" [ 1832.794236] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.798503] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1832.798503] env[62816]: value = "task-1789064" [ 1832.798503] env[62816]: _type = "Task" [ 1832.798503] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.804369] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bf92e7-fd14-80cf-70ce-a496414efea2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.809405] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789064, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.826255] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.222424] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.249196] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.253426] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.306843] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52bf92e7-fd14-80cf-70ce-a496414efea2, 'name': SearchDatastore_Task, 'duration_secs': 0.136806} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.308109] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf696837-781d-4afc-bc94-8982bf387c11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.313329] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789064, 'name': ReconfigVM_Task, 'duration_secs': 0.300575} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.313955] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfigured VM instance instance-00000050 to attach disk [datastore1] f97ea34e-792e-4023-bd2f-549dba129925/f97ea34e-792e-4023-bd2f-549dba129925.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.315399] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_format': None, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'guest_format': None, 'size': 0, 'disk_bus': None, 'image_id': '844838ed-b150-482e-a0f6-dcce37470b52'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371164', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'name': 'volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'f97ea34e-792e-4023-bd2f-549dba129925', 'attached_at': '', 'detached_at': '', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'serial': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2'}, 'disk_bus': None, 'delete_on_termination': False, 'attachment_id': 'fa9fcf07-6b53-42ad-b97d-5ec36ba509b2', 'volume_type': None}], 'swap': None} {{(pid=62816) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1833.315621] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1833.315824] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371164', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'name': 'volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'f97ea34e-792e-4023-bd2f-549dba129925', 'attached_at': '', 'detached_at': '', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'serial': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1833.317917] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed95da1-ac3e-43f2-a827-3d5e9b2c28fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.320460] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1833.320460] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525c8e73-c05e-0f26-5496-08f85ee318f8" [ 1833.320460] env[62816]: _type = "Task" [ 1833.320460] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.338583] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ad6303-018e-430e-8806-464b8aaa608a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.346567] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525c8e73-c05e-0f26-5496-08f85ee318f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.346813] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.369634] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2/volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1833.369957] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b12521b-d3b1-47e1-ae2c-316e84e21ebf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.387820] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1833.387820] env[62816]: value = "task-1789065" [ 1833.387820] env[62816]: _type = "Task" [ 1833.387820] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.396988] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789065, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.721967] env[62816]: DEBUG oslo_vmware.api [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 4.414715} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.721967] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1833.722156] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1833.722236] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1833.722410] env[62816]: INFO nova.compute.manager [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Took 5.51 seconds to destroy the instance on the hypervisor. 
[ 1833.722660] env[62816]: DEBUG oslo.service.loopingcall [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1833.722896] env[62816]: DEBUG nova.compute.manager [-] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1833.723025] env[62816]: DEBUG nova.network.neutron [-] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1833.745405] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789058, 'name': CopyVirtualDisk_Task, 'duration_secs': 11.778917} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.745978] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1833.746209] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1833.746452] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c21f795-f927-4c2c-b697-9ff712e99542 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.751796] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.752060] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.752301] env[62816]: INFO nova.compute.manager [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 
tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Attaching volume 8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa to /dev/sdb [ 1833.753693] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.755123] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1833.755123] env[62816]: value = "task-1789066" [ 1833.755123] env[62816]: _type = "Task" [ 1833.755123] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.763128] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789066, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.789315] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae18e93e-4a71-4738-bf9e-44ade15a5af4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.796657] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8ae74a-b59b-4149-848f-86de6aa81b41 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.809816] env[62816]: DEBUG nova.virt.block_device [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updating existing volume attachment record: 0db01edb-4ff2-43a7-8f8f-25d491da0072 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1833.832378] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789060, 'name': ReconfigVM_Task, 'duration_secs': 5.581195} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.835820] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1833.836155] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525c8e73-c05e-0f26-5496-08f85ee318f8, 'name': SearchDatastore_Task, 'duration_secs': 0.115959} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.836333] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da4e577a-d9c6-49cb-90d4-f54aa600a924 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.837831] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.838104] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a/642a07d7-8d15-4874-9dbe-bb9aa29e4d8a.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1833.838338] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc63a79b-ab6e-4016-a7d4-31dffc072b7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.847301] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1833.847301] env[62816]: value = "task-1789068" [ 1833.847301] env[62816]: _type = "Task" [ 1833.847301] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.847554] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1833.847554] env[62816]: value = "task-1789067" [ 1833.847554] env[62816]: _type = "Task" [ 1833.847554] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.868166] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.868403] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789067, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.897152] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789065, 'name': ReconfigVM_Task, 'duration_secs': 0.365349} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.897479] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfigured VM instance instance-00000050 to attach disk [datastore1] volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2/volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.902214] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-909c2312-e497-4231-ba7d-0a20664e2cb2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.916881] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1833.916881] env[62816]: value = "task-1789069" [ 1833.916881] env[62816]: _type = "Task" [ 1833.916881] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.925597] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789069, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.249063] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.265359] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789066, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065199} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.265678] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1834.266533] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def3169d-57b7-4e7a-9403-f5e1f1202174 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.291953] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1834.292379] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55ae287a-54f9-47f2-abd8-2618c8762c68 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.317808] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1834.317808] env[62816]: value = "task-1789073" [ 1834.317808] env[62816]: _type = "Task" [ 1834.317808] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.327776] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789073, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.359896] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789068, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.363539] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789067, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.427340] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789069, 'name': ReconfigVM_Task, 'duration_secs': 0.166812} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.427743] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371164', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'name': 'volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'f97ea34e-792e-4023-bd2f-549dba129925', 'attached_at': '', 'detached_at': '', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'serial': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1834.428432] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07d4949d-4ab5-4710-937d-76e5991b01ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.435311] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1834.435311] env[62816]: value = "task-1789074" [ 1834.435311] env[62816]: _type = "Task" [ 1834.435311] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.443851] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789074, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.554978] env[62816]: DEBUG nova.compute.manager [req-f9a755b1-c9b9-49c3-9953-1dd210104903 req-d89bb212-2a5e-4c6f-8727-fc6dae10363e service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Received event network-vif-deleted-bed0373b-9c6a-4357-a640-8218a972cb72 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1834.555354] env[62816]: INFO nova.compute.manager [req-f9a755b1-c9b9-49c3-9953-1dd210104903 req-d89bb212-2a5e-4c6f-8727-fc6dae10363e service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Neutron deleted interface bed0373b-9c6a-4357-a640-8218a972cb72; detaching it from the instance and deleting it from the info cache [ 1834.555457] env[62816]: DEBUG nova.network.neutron [req-f9a755b1-c9b9-49c3-9953-1dd210104903 req-d89bb212-2a5e-4c6f-8727-fc6dae10363e service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.752320] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789059, 'name': CloneVM_Task, 'duration_secs': 12.632374} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.752756] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Created linked-clone VM from snapshot [ 1834.753546] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce0277d-78fd-463b-bbe2-528652ae9c89 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.760614] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Uploading image a1f44929-e6c3-4d27-a610-2c0cbe24b960 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1834.796608] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1834.796608] env[62816]: value = "vm-371173" [ 1834.796608] env[62816]: _type = "VirtualMachine" [ 1834.796608] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1834.796917] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-88df0f10-10cc-4545-bbee-3d509c185f3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.804140] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease: (returnval){ [ 1834.804140] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ba7acb-853d-3f84-4b14-2387442b5e3b" [ 1834.804140] env[62816]: _type = "HttpNfcLease" [ 1834.804140] env[62816]: } obtained for exporting VM: (result){ [ 1834.804140] env[62816]: value = "vm-371173" [ 1834.804140] env[62816]: _type = "VirtualMachine" [ 1834.804140] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1834.804469] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the lease: (returnval){ [ 1834.804469] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ba7acb-853d-3f84-4b14-2387442b5e3b" [ 1834.804469] env[62816]: _type = "HttpNfcLease" [ 1834.804469] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1834.811936] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1834.811936] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ba7acb-853d-3f84-4b14-2387442b5e3b" [ 1834.811936] env[62816]: _type = "HttpNfcLease" [ 1834.811936] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1834.827288] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789073, 'name': ReconfigVM_Task, 'duration_secs': 0.319048} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.827521] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfigured VM instance instance-0000005c to attach disk [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1834.828425] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bedfe025-de30-4c7c-9b72-3ae519270637 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.833771] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1834.833771] env[62816]: value = "task-1789076" [ 1834.833771] env[62816]: _type = "Task" [ 1834.833771] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.841765] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789076, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.858766] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697088} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.862146] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a/642a07d7-8d15-4874-9dbe-bb9aa29e4d8a.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1834.862473] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1834.862828] env[62816]: DEBUG oslo_vmware.api [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789067, 'name': PowerOnVM_Task, 'duration_secs': 0.845577} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.863048] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d3a4515-3414-4628-a8ff-ed79c58a046b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.864736] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1834.867448] env[62816]: DEBUG nova.compute.manager [None req-c198a53f-fd39-4caa-bb77-a23be93b3624 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1834.868172] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84fc150-985a-4252-b6f7-c0b8407df075 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.871459] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1834.871459] env[62816]: value = "task-1789077" [ 1834.871459] env[62816]: _type = "Task" [ 1834.871459] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.882851] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789077, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.947834] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789074, 'name': Rename_Task, 'duration_secs': 0.18296} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.948180] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1834.948449] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-584beb07-72da-431c-b83b-c065e70ddc67 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.954474] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1834.954474] env[62816]: value = "task-1789078" [ 1834.954474] env[62816]: _type = "Task" [ 1834.954474] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.965259] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789078, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.031634] env[62816]: DEBUG nova.network.neutron [-] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.058273] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b5db802-475e-4e6b-9f1b-6b2b1f89fd4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.067714] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b4a98e-defd-4e21-b647-208cbe13446a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.100312] env[62816]: DEBUG nova.compute.manager [req-f9a755b1-c9b9-49c3-9953-1dd210104903 req-d89bb212-2a5e-4c6f-8727-fc6dae10363e service nova] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Detach interface failed, port_id=bed0373b-9c6a-4357-a640-8218a972cb72, reason: Instance dd833e38-691c-4757-9c6b-659c74343d3e could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1835.312939] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1835.312939] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ba7acb-853d-3f84-4b14-2387442b5e3b" [ 1835.312939] env[62816]: _type = "HttpNfcLease" [ 1835.312939] env[62816]: } is ready. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1835.313264] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1835.313264] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ba7acb-853d-3f84-4b14-2387442b5e3b" [ 1835.313264] env[62816]: _type = "HttpNfcLease" [ 1835.313264] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1835.313954] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8673134-6cc1-43b5-b98a-ecc7b433b523 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.322279] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a2654e-1b85-d408-da40-bd6d44190ede/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1835.322476] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a2654e-1b85-d408-da40-bd6d44190ede/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1835.396659] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064695} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.396947] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789076, 'name': Rename_Task, 'duration_secs': 0.141948} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.397143] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1835.397627] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1835.398144] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-916515d7-8cc1-4022-bedb-235b39bf70ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.400469] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14cf49fd-5613-4f73-b961-88ee65c389e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.421485] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a/642a07d7-8d15-4874-9dbe-bb9aa29e4d8a.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1835.422929] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e029586a-fa5e-4b3d-9ce4-1c07443a0f4b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.437223] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1835.437223] env[62816]: value = "task-1789079" [ 1835.437223] env[62816]: _type = "Task" [ 1835.437223] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.442827] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1835.442827] env[62816]: value = "task-1789080" [ 1835.442827] env[62816]: _type = "Task" [ 1835.442827] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.446199] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789079, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.457018] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789080, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.464311] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789078, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.517134] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a172e707-9c4d-4d48-8210-ca5ab1e8e074 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.534253] env[62816]: INFO nova.compute.manager [-] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Took 1.81 seconds to deallocate network for instance. [ 1835.949739] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789079, 'name': PowerOnVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.959708] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789080, 'name': ReconfigVM_Task, 'duration_secs': 0.460462} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.963891] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a/642a07d7-8d15-4874-9dbe-bb9aa29e4d8a.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1835.964840] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a368dd15-8e98-412a-9951-bc8487492bc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.972093] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789078, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.973753] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1835.973753] env[62816]: value = "task-1789081" [ 1835.973753] env[62816]: _type = "Task" [ 1835.973753] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.983148] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789081, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.043889] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.044163] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.045712] env[62816]: DEBUG nova.objects.instance [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'resources' on Instance uuid dd833e38-691c-4757-9c6b-659c74343d3e {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1836.448875] env[62816]: DEBUG oslo_vmware.api [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789079, 'name': PowerOnVM_Task, 'duration_secs': 0.684276} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.449586] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1836.449586] env[62816]: INFO nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Took 20.30 seconds to spawn the instance on the hypervisor. 
[ 1836.449773] env[62816]: DEBUG nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1836.450524] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31c9e0c-f1aa-4d6c-ad53-9f34903df7b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.469334] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789078, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.483141] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789081, 'name': Rename_Task, 'duration_secs': 0.172997} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.483425] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1836.483821] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66e4478f-070e-48eb-b6fe-3907ac1b3e6f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.489752] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1836.489752] env[62816]: value = "task-1789083" [ 1836.489752] env[62816]: _type = "Task" [ 1836.489752] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.498051] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789083, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.582891] env[62816]: DEBUG nova.compute.manager [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1836.583105] env[62816]: DEBUG nova.compute.manager [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing instance network info cache due to event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1836.583330] env[62816]: DEBUG oslo_concurrency.lockutils [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.583476] env[62816]: DEBUG oslo_concurrency.lockutils [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.583640] env[62816]: DEBUG nova.network.neutron [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1836.723897] env[62816]: DEBUG nova.compute.manager [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1836.724130] env[62816]: DEBUG nova.compute.manager [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing instance network info cache due to event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1836.724541] env[62816]: DEBUG oslo_concurrency.lockutils [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.734487] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6799dd05-6ab7-42b1-a7a3-937accb4a635 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.744713] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96df11b1-fe56-45f4-848c-042aff678cb3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.781437] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31037857-c6ae-46d4-9d94-8ddb03cda5fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.789082] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d82bea-7aa4-49ec-b2b7-01f5c34f4ced {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.804323] env[62816]: DEBUG nova.compute.provider_tree [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 
27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1836.972562] env[62816]: INFO nova.compute.manager [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Took 25.16 seconds to build instance. [ 1836.976766] env[62816]: DEBUG oslo_vmware.api [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789078, 'name': PowerOnVM_Task, 'duration_secs': 1.51885} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.977145] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1837.002022] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789083, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.100224] env[62816]: DEBUG nova.compute.manager [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1837.101145] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-488fd917-877b-4679-b8ba-cde536ca953e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.308513] env[62816]: DEBUG nova.scheduler.client.report [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1837.416409] env[62816]: DEBUG nova.network.neutron [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updated VIF entry in instance network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1837.416909] env[62816]: DEBUG nova.network.neutron [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1837.478469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-04023986-69ea-463f-8042-3b64922186cf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.669s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.478469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 23.912s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.481938] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96bd8665-aa15-41b3-a2ec-bdc3a036a8e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.499534] env[62816]: DEBUG oslo_vmware.api [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789083, 'name': PowerOnVM_Task, 'duration_secs': 0.653746} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.500592] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1837.500990] env[62816]: INFO nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Took 19.06 seconds to spawn the instance on the hypervisor. [ 1837.501337] env[62816]: DEBUG nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1837.502450] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2b616d-6fa9-483f-a27c-3c1931a6f784 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.619364] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b6665a7d-2439-4f4d-934c-779d1f2b0857 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 43.405s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.619695] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "f97ea34e-792e-4023-bd2f-549dba129925" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 24.054s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.620091] env[62816]: INFO nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f97ea34e-792e-4023-bd2f-549dba129925] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1837.620462] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "f97ea34e-792e-4023-bd2f-549dba129925" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.813736] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.769s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.845258] env[62816]: INFO nova.scheduler.client.report [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted allocations for instance dd833e38-691c-4757-9c6b-659c74343d3e [ 1837.920017] env[62816]: DEBUG oslo_concurrency.lockutils [req-6f915bc1-36e3-4345-8d0b-8941c48182ae req-b5c5ea72-6c75-4f9c-a32d-df30b97b51d4 service nova] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.920488] env[62816]: DEBUG oslo_concurrency.lockutils [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.920749] env[62816]: DEBUG nova.network.neutron [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1837.991613] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.513s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.023231] env[62816]: INFO nova.compute.manager [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Took 25.30 seconds to build instance. [ 1838.356332] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1838.356600] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371175', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'name': 'volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa719ff5-0219-485f-aac7-2cde4bbef8f6', 'attached_at': '', 'detached_at': '', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'serial': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1838.357526] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ed3b43-b930-4f1a-8bea-a2c4b87dfbdb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.360598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f684eaa-7716-4b0c-8524-0503c5d86e03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "dd833e38-691c-4757-9c6b-659c74343d3e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.152s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.377361] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f81172c-dfe3-4c41-bc11-29bab0b6f4a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.402927] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa/volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1838.404030] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df909c49-f47b-43f3-9f0f-a4264789f1de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.422398] env[62816]: DEBUG oslo_vmware.api [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1838.422398] env[62816]: value = "task-1789084" [ 1838.422398] env[62816]: _type = "Task" [ 1838.422398] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.435997] env[62816]: DEBUG oslo_vmware.api [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789084, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.526312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e255d330-9dc1-46ad-977a-f2b407eab72f tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.813s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.722527] env[62816]: DEBUG nova.network.neutron [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updated VIF entry in instance network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1838.722927] env[62816]: DEBUG nova.network.neutron [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.821741] env[62816]: DEBUG nova.compute.manager [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Received event network-changed-464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1838.821969] env[62816]: DEBUG nova.compute.manager [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Refreshing instance network info cache due to event network-changed-464c5ce0-30b5-473d-910e-343ba514ffa7. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1838.822151] env[62816]: DEBUG oslo_concurrency.lockutils [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] Acquiring lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.822314] env[62816]: DEBUG oslo_concurrency.lockutils [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] Acquired lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.822455] env[62816]: DEBUG nova.network.neutron [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Refreshing network info cache for port 464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1838.855175] env[62816]: DEBUG nova.compute.manager [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1838.855415] env[62816]: DEBUG nova.compute.manager [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing instance network info cache due to event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1838.855638] env[62816]: DEBUG oslo_concurrency.lockutils [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.932561] env[62816]: DEBUG oslo_vmware.api [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789084, 'name': ReconfigVM_Task, 'duration_secs': 0.499711} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.932896] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Reconfigured VM instance instance-00000058 to attach disk [datastore1] volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa/volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1838.937508] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56ee015e-81d2-4e83-8a90-b964fc246bd2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.952387] env[62816]: DEBUG oslo_vmware.api [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1838.952387] env[62816]: value = "task-1789085" [ 1838.952387] env[62816]: _type = "Task" [ 1838.952387] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.960968] env[62816]: DEBUG oslo_vmware.api [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789085, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.021346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.021346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.021346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.021346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.021346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.022824] env[62816]: INFO nova.compute.manager [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Terminating instance [ 1839.024579] env[62816]: DEBUG nova.compute.manager [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1839.024784] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1839.025701] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc0712b-70f2-47e2-95e4-1a4bc64029e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.034430] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1839.034707] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bfc2eb7-dc08-49e2-96b1-a4754ad6b541 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.041828] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1839.041828] env[62816]: value = "task-1789086" [ 1839.041828] env[62816]: _type = "Task" [ 1839.041828] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.051039] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789086, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.228520] env[62816]: DEBUG oslo_concurrency.lockutils [req-781ef25c-c8dc-459a-8ed6-9b21bc96d480 req-399d0347-8438-4846-ba89-f73966abd10a service nova] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.229066] env[62816]: DEBUG oslo_concurrency.lockutils [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.229387] env[62816]: DEBUG nova.network.neutron [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1839.461842] env[62816]: DEBUG oslo_vmware.api [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789085, 'name': ReconfigVM_Task, 'duration_secs': 0.162359} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.462134] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371175', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'name': 'volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa719ff5-0219-485f-aac7-2cde4bbef8f6', 'attached_at': '', 'detached_at': '', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'serial': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1839.552202] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789086, 'name': PowerOffVM_Task, 'duration_secs': 0.234008} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.552501] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1839.552677] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1839.552939] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aabdb0b1-8e69-4f39-8e27-a02ad27ca931 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.603274] env[62816]: DEBUG nova.network.neutron [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updated VIF entry in instance network info cache for port 464c5ce0-30b5-473d-910e-343ba514ffa7. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1839.603851] env[62816]: DEBUG nova.network.neutron [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating instance_info_cache with network_info: [{"id": "464c5ce0-30b5-473d-910e-343ba514ffa7", "address": "fa:16:3e:46:53:f1", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.223", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap464c5ce0-30", "ovs_interfaceid": "464c5ce0-30b5-473d-910e-343ba514ffa7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.641495] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1839.641763] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-171d9f55-c194-4b88-a785-ab299d6446ff 
tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1839.641898] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Deleting the datastore file [datastore1] 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1839.642191] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72e6835d-2a1f-4982-a7b8-40fbb1a61772 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.653042] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for the task: (returnval){ [ 1839.653042] env[62816]: value = "task-1789088" [ 1839.653042] env[62816]: _type = "Task" [ 1839.653042] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.657771] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789088, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.998720] env[62816]: DEBUG nova.network.neutron [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updated VIF entry in instance network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1839.999175] env[62816]: DEBUG nova.network.neutron [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.107149] env[62816]: DEBUG oslo_concurrency.lockutils [req-24e8d398-d606-4883-b84b-c5e8be481ff1 req-c9d8cf3f-4958-4c6c-a157-c3d17f5a3189 service nova] Releasing lock "refresh_cache-e26b6593-7e64-4a43-b09d-92d2e668c25b" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.160776] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789088, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.252715] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.253048] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.253321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.253537] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.253731] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.256183] env[62816]: INFO nova.compute.manager [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Terminating instance [ 1840.258251] env[62816]: DEBUG nova.compute.manager [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1840.258472] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1840.259571] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9479c4c7-6205-4d2b-9e49-d5f0025bbe5e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.268151] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1840.268438] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8f828da-2fc7-4267-a8f1-e294cad073b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.273967] env[62816]: DEBUG oslo_vmware.api [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1840.273967] env[62816]: value = "task-1789089" [ 1840.273967] env[62816]: _type = "Task" [ 1840.273967] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.281611] env[62816]: DEBUG oslo_vmware.api [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.504468] env[62816]: DEBUG oslo_concurrency.lockutils [req-bdeac6ac-dd3e-4c7f-8b07-9bb914e80c5a req-604ca2bb-b889-463b-b7a1-352b827fa288 service nova] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.504954] env[62816]: DEBUG nova.objects.instance [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'flavor' on Instance uuid fa719ff5-0219-485f-aac7-2cde4bbef8f6 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1840.661600] env[62816]: DEBUG oslo_vmware.api [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Task: {'id': task-1789088, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.586093} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.661875] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1840.662084] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1840.662263] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1840.662435] env[62816]: INFO nova.compute.manager [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1840.662678] env[62816]: DEBUG oslo.service.loopingcall [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1840.662868] env[62816]: DEBUG nova.compute.manager [-] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1840.662963] env[62816]: DEBUG nova.network.neutron [-] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1840.788782] env[62816]: DEBUG oslo_vmware.api [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789089, 'name': PowerOffVM_Task, 'duration_secs': 0.318181} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.789108] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1840.789323] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1840.789632] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-035e9879-b83a-43f9-b6ce-8e74f8219fb8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.888009] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1840.888257] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1840.888444] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Deleting the datastore file [datastore1] c9ebcce1-8374-46fb-996f-c271cb8dbf84 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1840.888744] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bedc5e55-c9c1-44e1-bdde-02a7d609efe9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.893272] env[62816]: DEBUG nova.compute.manager [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1840.893352] env[62816]: DEBUG nova.compute.manager [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing instance network info cache due to event network-changed-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1840.894022] env[62816]: DEBUG oslo_concurrency.lockutils [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] Acquiring lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.894022] env[62816]: DEBUG oslo_concurrency.lockutils [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] Acquired lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.894022] env[62816]: DEBUG nova.network.neutron [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Refreshing network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1840.901871] env[62816]: DEBUG oslo_vmware.api [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for the task: (returnval){ [ 1840.901871] env[62816]: value = "task-1789091" [ 1840.901871] env[62816]: _type = "Task" [ 1840.901871] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.911262] env[62816]: DEBUG oslo_vmware.api [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789091, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.965598] env[62816]: DEBUG nova.compute.manager [req-9fbf9ba7-8048-4e35-8514-d0b44a5be7f5 req-533c49e3-01be-4034-9a3c-2c9620a4e3b1 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Received event network-vif-deleted-99350a4f-7d49-4b0d-be97-689bb89aba9c {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1840.965756] env[62816]: INFO nova.compute.manager [req-9fbf9ba7-8048-4e35-8514-d0b44a5be7f5 req-533c49e3-01be-4034-9a3c-2c9620a4e3b1 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Neutron deleted interface 99350a4f-7d49-4b0d-be97-689bb89aba9c; detaching it from the instance and deleting it from the info cache [ 1840.965933] env[62816]: DEBUG nova.network.neutron [req-9fbf9ba7-8048-4e35-8514-d0b44a5be7f5 req-533c49e3-01be-4034-9a3c-2c9620a4e3b1 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.009904] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a68a7d3e-8a81-496d-988e-0c9e16edaac1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.258s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.110244] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.110627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.297264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1841.297558] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.409951] env[62816]: DEBUG oslo_vmware.api [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 
tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Task: {'id': task-1789091, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415895} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.410236] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1841.410424] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1841.410606] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1841.410803] env[62816]: INFO nova.compute.manager [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1841.411064] env[62816]: DEBUG oslo.service.loopingcall [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1841.411263] env[62816]: DEBUG nova.compute.manager [-] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1841.411363] env[62816]: DEBUG nova.network.neutron [-] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1841.413011] env[62816]: DEBUG nova.network.neutron [-] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.469119] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-77aa4ac0-c034-43d4-ba8f-b664e7ca5d88 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.480307] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df70b427-1a55-446e-9879-3a96d14ae5ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.511993] env[62816]: DEBUG nova.compute.manager [req-9fbf9ba7-8048-4e35-8514-d0b44a5be7f5 req-533c49e3-01be-4034-9a3c-2c9620a4e3b1 service nova] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Detach interface failed, port_id=99350a4f-7d49-4b0d-be97-689bb89aba9c, reason: Instance 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1841.613546] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1841.617973] env[62816]: DEBUG nova.network.neutron [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updated VIF entry in instance network info cache for port 7c4f15cf-a845-46c3-a9e8-8f650fb6a58b. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1841.618339] env[62816]: DEBUG nova.network.neutron [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [{"id": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "address": "fa:16:3e:4d:5c:f9", "network": {"id": "6300482d-729b-43f3-93d8-31b458aadabe", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1325309668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bca8e53dcceb4a5e945cba0a783b2e31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c4f15cf-a8", "ovs_interfaceid": "7c4f15cf-a845-46c3-a9e8-8f650fb6a58b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1841.800804] env[62816]: INFO nova.compute.manager [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Detaching volume 8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa [ 1841.837266] env[62816]: INFO nova.virt.block_device [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Attempting to driver detach volume 8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa from mountpoint /dev/sdb [ 1841.837529] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1841.837720] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371175', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'name': 'volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa719ff5-0219-485f-aac7-2cde4bbef8f6', 'attached_at': '', 'detached_at': '', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'serial': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1841.838854] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088fb10e-045f-45c9-aafb-a19accbc6ce6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.861386] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2560dabc-87ad-45b3-aa6a-56c6e5dcff46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.868378] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c360d367-10fe-46c1-a964-f3b9cfb8f8a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.888893] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9938cbe1-7cdf-446a-9b47-b91f193e35d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.903833] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] The volume has not been displaced from its original location: [datastore1] volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa/volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa.vmdk. No consolidation needed. {{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1841.909287] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Reconfiguring VM instance instance-00000058 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1841.909634] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5e2db54-ebec-456c-a78b-8992d1268128 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.922413] env[62816]: INFO nova.compute.manager [-] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Took 1.26 seconds to deallocate network for instance. 
[ 1841.929267] env[62816]: DEBUG oslo_vmware.api [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1841.929267] env[62816]: value = "task-1789092" [ 1841.929267] env[62816]: _type = "Task" [ 1841.929267] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.937411] env[62816]: DEBUG oslo_vmware.api [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789092, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.120319] env[62816]: DEBUG oslo_concurrency.lockutils [req-0e5bdf75-d8a0-4005-9b37-ca750113b583 req-02161fdb-d078-4de1-ad1b-8679992efba5 service nova] Releasing lock "refresh_cache-c9ebcce1-8374-46fb-996f-c271cb8dbf84" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.137593] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.137931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.140061] env[62816]: INFO nova.compute.claims [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1842.147446] env[62816]: DEBUG nova.network.neutron [-] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.429528] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.438322] env[62816]: DEBUG oslo_vmware.api [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789092, 'name': ReconfigVM_Task, 'duration_secs': 0.278495} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.438691] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Reconfigured VM instance instance-00000058 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1842.443592] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef28d3ec-4c2f-4b9e-923c-c71d807ec6b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.458869] env[62816]: DEBUG oslo_vmware.api [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1842.458869] env[62816]: value = "task-1789093" [ 1842.458869] env[62816]: _type = "Task" [ 1842.458869] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.466598] env[62816]: DEBUG oslo_vmware.api [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.649762] env[62816]: INFO nova.compute.manager [-] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Took 1.24 seconds to deallocate network for instance. [ 1842.921229] env[62816]: DEBUG nova.compute.manager [req-af1fe821-b2aa-41c0-9a58-e9fe24ca2bfb req-945e9f5d-f15f-4067-9c72-c351e24227a9 service nova] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Received event network-vif-deleted-7c4f15cf-a845-46c3-a9e8-8f650fb6a58b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1842.970666] env[62816]: DEBUG oslo_vmware.api [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789093, 'name': ReconfigVM_Task, 'duration_secs': 0.15255} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.971081] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371175', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'name': 'volume-8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'fa719ff5-0219-485f-aac7-2cde4bbef8f6', 'attached_at': '', 'detached_at': '', 'volume_id': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa', 'serial': '8c2513f6-e91c-4dd9-b4c8-c07faad9a0aa'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1843.156578] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.312128] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3411bfaa-d209-4841-b1b4-e2479a5e2cae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.319649] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e18b753-5a5c-49ad-8f8b-7c0fa7cfa964 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.349489] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d303bf9-abd7-4553-86a2-2d6cf3f19534 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.357293] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ee8523-60a1-4635-af7e-3ce7eeaf7017 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.370443] env[62816]: DEBUG nova.compute.provider_tree [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1843.514392] env[62816]: DEBUG nova.objects.instance [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'flavor' on Instance uuid fa719ff5-0219-485f-aac7-2cde4bbef8f6 {{(pid=62816) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1843.891927] env[62816]: ERROR nova.scheduler.client.report [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [req-24519d43-6c7a-4781-a649-524b58da8231] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-24519d43-6c7a-4781-a649-524b58da8231"}]} [ 1843.910215] env[62816]: DEBUG nova.scheduler.client.report [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1843.925664] env[62816]: DEBUG nova.scheduler.client.report [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1843.925888] env[62816]: DEBUG nova.compute.provider_tree [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 160, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1843.937634] env[62816]: DEBUG nova.scheduler.client.report [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1843.957593] env[62816]: DEBUG nova.scheduler.client.report [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 
tempest-AttachInterfacesTestJSON-445942743-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1844.111907] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b200b41-5e80-4acf-b6c7-04bd06ea340d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.116398] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a2654e-1b85-d408-da40-bd6d44190ede/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1844.117222] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052bf967-2691-4285-bfee-452094c09efd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.125073] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a2654e-1b85-d408-da40-bd6d44190ede/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1844.125257] env[62816]: ERROR oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a2654e-1b85-d408-da40-bd6d44190ede/disk-0.vmdk due to incomplete transfer. [ 1844.127141] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-adc57992-d028-4265-8eb5-7cb65d922c5d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.129530] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24aa686-7e0d-4eda-a4fe-6ff98cb66d8a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.161411] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5589b0a7-4e76-47e6-9760-1a36a1507c7b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.164038] env[62816]: DEBUG oslo_vmware.rw_handles [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52a2654e-1b85-d408-da40-bd6d44190ede/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1844.164241] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Uploaded image a1f44929-e6c3-4d27-a610-2c0cbe24b960 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1844.166519] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1844.166698] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9b92041a-32cd-455b-adcc-e694bae57372 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.174897] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b1da26c-d1db-4346-9e15-52880a017ba6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.179366] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1844.179366] env[62816]: value = "task-1789094" [ 1844.179366] env[62816]: _type = "Task" [ 1844.179366] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.191041] env[62816]: DEBUG nova.compute.provider_tree [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1844.197377] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789094, 'name': Destroy_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.523239] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c19467bb-421b-4e9f-9d1c-13aca83b5857 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.225s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.689176] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789094, 'name': Destroy_Task, 'duration_secs': 0.338584} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.689436] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroyed the VM [ 1844.689679] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1844.689920] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-31733b80-59d2-405c-bb55-c9c3de285588 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.697143] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1844.697143] env[62816]: value = "task-1789095" [ 1844.697143] env[62816]: _type = "Task" [ 1844.697143] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.704257] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789095, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.721157] env[62816]: DEBUG nova.scheduler.client.report [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1844.721433] env[62816]: DEBUG nova.compute.provider_tree [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 125 to 126 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1844.721624] env[62816]: DEBUG nova.compute.provider_tree [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1845.206625] env[62816]: DEBUG oslo_vmware.api [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789095, 'name': RemoveSnapshot_Task, 'duration_secs': 0.484714} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.206909] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1845.207169] env[62816]: INFO nova.compute.manager [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 29.29 seconds to snapshot the instance on the hypervisor. 
[ 1845.226711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.089s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.227225] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1845.230219] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.801s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.230358] env[62816]: DEBUG nova.objects.instance [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lazy-loading 'resources' on Instance uuid 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1845.549139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.549139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.549139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1845.549617] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1845.549806] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.552192] env[62816]: INFO nova.compute.manager [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Terminating instance [ 1845.553970] env[62816]: DEBUG nova.compute.manager [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1845.554184] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1845.555069] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32aa5a53-d9e7-45e8-b50c-71526a80e14b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.562392] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1845.562615] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be6bcc63-3480-4adf-9eb1-d6f3fce69659 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.569142] env[62816]: DEBUG oslo_vmware.api [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1845.569142] env[62816]: value = "task-1789096" [ 1845.569142] env[62816]: _type = "Task" [ 1845.569142] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.576521] env[62816]: DEBUG oslo_vmware.api [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789096, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.733683] env[62816]: DEBUG nova.compute.utils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1845.738367] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1845.738605] env[62816]: DEBUG nova.network.neutron [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1845.755586] env[62816]: DEBUG nova.compute.manager [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Found 3 images (rotation: 2) {{(pid=62816) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1845.756183] env[62816]: DEBUG nova.compute.manager [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Rotating out 1 backups {{(pid=62816) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1845.756183] env[62816]: DEBUG nova.compute.manager [None req-3a896ad6-7ca9-42ec-bd5d-35595080f93a tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleting image 141b03bd-f52b-4815-b348-7822ad15c428 {{(pid=62816) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1845.780343] env[62816]: DEBUG nova.policy [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1845.892629] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ed52da9-a9e8-4b2f-90f5-a1a2df377cce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.900315] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-425ef358-33a7-4def-b99e-9e4776c20d3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.932009] 
env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a5a304-d61a-472f-aaca-a9a5afd9a6fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.939677] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c878a9-e483-491d-a1ea-201a9123eebb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.953042] env[62816]: DEBUG nova.compute.provider_tree [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1846.062291] env[62816]: DEBUG nova.network.neutron [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Successfully created port: 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1846.079375] env[62816]: DEBUG oslo_vmware.api [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789096, 'name': PowerOffVM_Task, 'duration_secs': 0.234742} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.079668] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1846.079844] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1846.080110] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24afee5b-0b24-4fb2-a5c4-4da86fecdaa4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.154501] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1846.154740] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1846.154994] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleting the datastore file [datastore1] fa719ff5-0219-485f-aac7-2cde4bbef8f6 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1846.155571] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c7753f0-2dca-4c41-a755-a803203d1f1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.163919] env[62816]: DEBUG oslo_vmware.api [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1846.163919] env[62816]: value = "task-1789098" [ 1846.163919] env[62816]: _type = "Task" [ 1846.163919] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.172684] env[62816]: DEBUG oslo_vmware.api [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789098, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.238814] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1846.456493] env[62816]: DEBUG nova.scheduler.client.report [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1846.663687] env[62816]: DEBUG oslo_concurrency.lockutils [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.663993] env[62816]: DEBUG oslo_concurrency.lockutils [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.675270] env[62816]: DEBUG oslo_vmware.api [None 
req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135712} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.676334] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1846.676615] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1846.676807] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1846.676986] env[62816]: INFO nova.compute.manager [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1846.677265] env[62816]: DEBUG oslo.service.loopingcall [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1846.678585] env[62816]: DEBUG nova.compute.manager [-] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1846.678585] env[62816]: DEBUG nova.network.neutron [-] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1846.961498] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.964357] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.808s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.964837] env[62816]: DEBUG nova.objects.instance [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lazy-loading 'resources' on Instance uuid c9ebcce1-8374-46fb-996f-c271cb8dbf84 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1846.988397] env[62816]: INFO nova.scheduler.client.report [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Deleted allocations for instance 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a [ 1847.170582] env[62816]: INFO nova.compute.manager [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Detaching volume be3394e2-7a5f-4625-af79-3bd3e05d60b1 [ 1847.177204] env[62816]: DEBUG nova.compute.manager [req-eead63ec-2378-4bf7-8185-9dc16cf7f0bb req-e7534e0f-c3c1-4bd8-b490-ce987bb8491f service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Received event network-vif-deleted-2d41a0d3-8eb3-4503-8363-6ec1b787de60 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1847.177695] env[62816]: INFO nova.compute.manager [req-eead63ec-2378-4bf7-8185-9dc16cf7f0bb req-e7534e0f-c3c1-4bd8-b490-ce987bb8491f service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Neutron deleted interface 2d41a0d3-8eb3-4503-8363-6ec1b787de60; detaching it from the instance and deleting it from the info cache [ 1847.178321] env[62816]: DEBUG nova.network.neutron [req-eead63ec-2378-4bf7-8185-9dc16cf7f0bb req-e7534e0f-c3c1-4bd8-b490-ce987bb8491f service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.215490] env[62816]: INFO nova.virt.block_device [None req-164b8dd4-105c-40b3-b091-7a887e263798 
tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Attempting to driver detach volume be3394e2-7a5f-4625-af79-3bd3e05d60b1 from mountpoint /dev/sdb [ 1847.216553] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1847.216553] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371155', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'name': 'volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b9e8af08-9579-4dbf-8ea1-35ffab75e159', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'serial': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1847.217332] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2120082f-d9e1-42f4-9f6a-54b077433309 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.243258] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc62c2e7-5759-49ef-b0ed-f576fbe8d329 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.251074] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1847.254916] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c90ea6-57cb-4ee8-9fdb-5f3fe7b77e74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.283593] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd547be2-85ec-4b4f-8a43-9eec5384301b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.301399] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] The volume has not been displaced from its original location: [datastore1] volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1/volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1847.307353] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfiguring VM instance instance-0000004e to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1847.310206] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1847.310436] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1847.310595] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1847.310781] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1847.310960] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1847.311140] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1847.311356] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1847.311517] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1847.311687] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1847.311850] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1847.312068] env[62816]: DEBUG nova.virt.hardware [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1847.312349] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae36ed5c-b542-4410-be72-6ebccfae24c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.326430] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f754a37e-df7d-45f6-a6bf-fc605840663d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.335371] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fa4b6e-4513-4b10-a3aa-66ce44b50e43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.338931] env[62816]: DEBUG oslo_vmware.api [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1847.338931] env[62816]: value = "task-1789099" [ 1847.338931] env[62816]: _type = "Task" [ 1847.338931] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.355988] env[62816]: DEBUG oslo_vmware.api [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789099, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.501223] env[62816]: DEBUG oslo_concurrency.lockutils [None req-171d9f55-c194-4b88-a785-ab299d6446ff tempest-DeleteServersAdminTestJSON-179893403 tempest-DeleteServersAdminTestJSON-179893403-project-member] Lock "642a07d7-8d15-4874-9dbe-bb9aa29e4d8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.481s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.595789] env[62816]: DEBUG nova.compute.manager [req-659a988f-81ac-4222-b49e-977553c2e87d req-94be55aa-86d4-481d-ba17-4452a55a9689 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-vif-plugged-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1847.596038] env[62816]: DEBUG oslo_concurrency.lockutils [req-659a988f-81ac-4222-b49e-977553c2e87d req-94be55aa-86d4-481d-ba17-4452a55a9689 service nova] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.596244] env[62816]: DEBUG oslo_concurrency.lockutils [req-659a988f-81ac-4222-b49e-977553c2e87d req-94be55aa-86d4-481d-ba17-4452a55a9689 service nova] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.596422] env[62816]: DEBUG oslo_concurrency.lockutils [req-659a988f-81ac-4222-b49e-977553c2e87d req-94be55aa-86d4-481d-ba17-4452a55a9689 service nova] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.596603] env[62816]: DEBUG nova.compute.manager [req-659a988f-81ac-4222-b49e-977553c2e87d req-94be55aa-86d4-481d-ba17-4452a55a9689 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] No waiting events found dispatching network-vif-plugged-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1847.596842] env[62816]: WARNING nova.compute.manager [req-659a988f-81ac-4222-b49e-977553c2e87d req-94be55aa-86d4-481d-ba17-4452a55a9689 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received unexpected event network-vif-plugged-9f110684-506a-45d4-bf70-da542c84eeb8 for instance with vm_state building and task_state spawning. 
[ 1847.653315] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad5b329f-9063-4626-83c9-a6e27f59fd4d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.659026] env[62816]: DEBUG nova.network.neutron [-] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1847.663240] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1a0155-40b9-4947-bde3-c0b4f076be34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.703362] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb2fa700-9803-4216-9c1e-56b6f41441d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.705743] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8b5538-a9d1-4274-aa41-7d6b9ccde838 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.717920] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a197d6c-d79d-4807-a114-38ad8e163326 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.730048] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84c8993-3c64-4f3f-89c7-b8d1183b21a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.744903] env[62816]: DEBUG nova.compute.provider_tree [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1847.757828] env[62816]: DEBUG nova.network.neutron [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Successfully updated port: 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1847.760080] env[62816]: DEBUG nova.compute.manager [req-eead63ec-2378-4bf7-8185-9dc16cf7f0bb req-e7534e0f-c3c1-4bd8-b490-ce987bb8491f service nova] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Detach interface failed, port_id=2d41a0d3-8eb3-4503-8363-6ec1b787de60, reason: Instance fa719ff5-0219-485f-aac7-2cde4bbef8f6 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1847.761394] env[62816]: DEBUG nova.scheduler.client.report [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1847.848591] env[62816]: DEBUG oslo_vmware.api [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789099, 'name': ReconfigVM_Task, 'duration_secs': 0.275099} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.848903] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Reconfigured VM instance instance-0000004e to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1847.853546] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c82d6c86-cd96-47ee-82fd-9168f9c01f0e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.868241] env[62816]: DEBUG oslo_vmware.api [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1847.868241] env[62816]: value = "task-1789100" [ 1847.868241] env[62816]: _type = "Task" [ 1847.868241] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.875843] env[62816]: DEBUG oslo_vmware.api [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789100, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.161455] env[62816]: INFO nova.compute.manager [-] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Took 1.48 seconds to deallocate network for instance. 
[ 1848.261767] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1848.262099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1848.262099] env[62816]: DEBUG nova.network.neutron [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1848.266504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.302s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.273331] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.273559] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.273734] env[62816]: DEBUG nova.compute.manager [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1848.274847] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2facb20b-1722-4ff2-9135-01e75a23cb91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.282156] env[62816]: DEBUG nova.compute.manager [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3369}} [ 1848.282695] env[62816]: DEBUG nova.objects.instance [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'flavor' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1848.288519] env[62816]: INFO nova.scheduler.client.report [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Deleted allocations for instance c9ebcce1-8374-46fb-996f-c271cb8dbf84 [ 1848.378074] env[62816]: DEBUG oslo_vmware.api [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789100, 'name': ReconfigVM_Task, 'duration_secs': 0.148385} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.378425] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371155', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'name': 'volume-be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b9e8af08-9579-4dbf-8ea1-35ffab75e159', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1', 'serial': 'be3394e2-7a5f-4625-af79-3bd3e05d60b1'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1848.667927] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.668240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.668470] env[62816]: DEBUG nova.objects.instance [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'resources' on Instance uuid fa719ff5-0219-485f-aac7-2cde4bbef8f6 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1848.787587] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 
1848.787889] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d7c9805-c34f-4f14-9738-ea3b979fe70c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.796700] env[62816]: DEBUG oslo_vmware.api [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1848.796700] env[62816]: value = "task-1789101" [ 1848.796700] env[62816]: _type = "Task" [ 1848.796700] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.797459] env[62816]: DEBUG oslo_concurrency.lockutils [None req-24ed53e7-0e83-4db5-b2d1-885ad642c2c1 tempest-ServerRescueTestJSONUnderV235-1756607083 tempest-ServerRescueTestJSONUnderV235-1756607083-project-member] Lock "c9ebcce1-8374-46fb-996f-c271cb8dbf84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.544s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.806974] env[62816]: DEBUG oslo_vmware.api [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789101, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.807738] env[62816]: DEBUG nova.network.neutron [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1848.931473] env[62816]: DEBUG nova.objects.instance [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'flavor' on Instance uuid b9e8af08-9579-4dbf-8ea1-35ffab75e159 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.026109] env[62816]: DEBUG nova.network.neutron [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.310402] env[62816]: DEBUG oslo_vmware.api [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789101, 'name': PowerOffVM_Task, 'duration_secs': 0.185449} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.310704] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1849.310837] env[62816]: DEBUG nova.compute.manager [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1849.311653] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33e4828-203a-4a5f-a454-0b3b26708abc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.326198] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a17e75b-3ce5-41e6-b45b-f265b31fbb70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.332591] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da3b0a3-92bf-43fc-866c-3c36b0e02218 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.368523] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d83419a7-60db-4410-89e7-a34fe5bcad13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.376540] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9b8a22-f1f5-4a3c-91fe-2dd72847e294 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.391124] env[62816]: DEBUG nova.compute.provider_tree [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.529199] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1849.529534] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Instance network_info: |[{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1849.529957] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:1a:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f110684-506a-45d4-bf70-da542c84eeb8', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1849.538082] env[62816]: DEBUG oslo.service.loopingcall [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1849.538300] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1849.538522] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-21972eb4-085b-4817-9341-c71858443b2a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.558419] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1849.558419] env[62816]: value = "task-1789102" [ 1849.558419] env[62816]: _type = "Task" [ 1849.558419] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.565583] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789102, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.623810] env[62816]: DEBUG nova.compute.manager [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1849.624015] env[62816]: DEBUG nova.compute.manager [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing instance network info cache due to event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1849.624243] env[62816]: DEBUG oslo_concurrency.lockutils [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.624391] env[62816]: DEBUG oslo_concurrency.lockutils [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.624555] env[62816]: DEBUG nova.network.neutron [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1849.826138] env[62816]: DEBUG oslo_concurrency.lockutils [None req-6c65ba8b-aa18-4d94-a9c0-eea29dc375af tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.552s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.894254] env[62816]: DEBUG nova.scheduler.client.report [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1849.938804] env[62816]: DEBUG oslo_concurrency.lockutils [None req-164b8dd4-105c-40b3-b091-7a887e263798 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.275s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1850.070476] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789102, 'name': CreateVM_Task, 'duration_secs': 0.305145} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.070593] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1850.071312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.071702] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.071832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1850.072118] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db6879eb-9855-4e49-9e1b-50d8b1dcb563 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.082025] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1850.082025] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52909eab-5015-24b3-81be-07d73c7372e1" [ 1850.082025] env[62816]: _type = "Task" [ 1850.082025] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.093649] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52909eab-5015-24b3-81be-07d73c7372e1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.401165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.731s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.440454] env[62816]: INFO nova.scheduler.client.report [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleted allocations for instance fa719ff5-0219-485f-aac7-2cde4bbef8f6 [ 1850.476028] env[62816]: DEBUG nova.network.neutron [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updated VIF entry in instance network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1850.476399] env[62816]: DEBUG nova.network.neutron [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.600174] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52909eab-5015-24b3-81be-07d73c7372e1, 'name': SearchDatastore_Task, 'duration_secs': 0.012061} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.600402] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.600707] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1850.601028] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.601170] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.601313] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1850.601622] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34a6ac74-5d6b-4705-9131-257cd38a5037 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.613789] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1850.613997] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1850.615421] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a1be1be-ff6d-40e2-94a1-d3188150c44b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.621736] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1850.621736] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b1867b-51d6-bea1-7504-50215054624d" [ 1850.621736] env[62816]: _type = "Task" [ 1850.621736] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.633593] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b1867b-51d6-bea1-7504-50215054624d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.954922] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8182c7c5-8ff1-4790-9b1e-3817c3325aa1 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "fa719ff5-0219-485f-aac7-2cde4bbef8f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.404s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1850.979440] env[62816]: DEBUG oslo_concurrency.lockutils [req-73d07390-cead-49df-8bcb-29f196e415c7 req-a1fb42f0-d385-47f2-85a1-0da9c4fcc789 service nova] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.133317] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b1867b-51d6-bea1-7504-50215054624d, 'name': SearchDatastore_Task, 'duration_secs': 0.009762} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.134482] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e205ab-db18-4db0-aa96-2f9fbd735942 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.139876] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1851.139876] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b5a0a6-2f37-7c32-20cd-f1f41673880f" [ 1851.139876] env[62816]: _type = "Task" [ 1851.139876] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.148874] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b5a0a6-2f37-7c32-20cd-f1f41673880f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.168477] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.168477] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.168652] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.168869] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.169128] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.172011] env[62816]: INFO nova.compute.manager [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Terminating instance [ 1851.174098] env[62816]: DEBUG nova.compute.manager [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1851.174777] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.175741] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d739dde5-aaaa-4503-9a5c-75f2c005f843 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.183315] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.185535] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1222980e-158c-4ceb-bcba-18164dc195a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.194210] env[62816]: DEBUG oslo_vmware.api [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1851.194210] env[62816]: value = "task-1789103" [ 1851.194210] env[62816]: _type = "Task" [ 1851.194210] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.204719] env[62816]: DEBUG oslo_vmware.api [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789103, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.496780] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.497142] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.651191] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b5a0a6-2f37-7c32-20cd-f1f41673880f, 'name': SearchDatastore_Task, 'duration_secs': 0.009956} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.651488] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.651754] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d/aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1851.652029] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cb96ea4-b612-46fc-a580-0ab15b015e8d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.659111] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1851.659111] env[62816]: value = "task-1789104" [ 1851.659111] env[62816]: _type = "Task" [ 1851.659111] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.671411] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789104, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.705026] env[62816]: DEBUG oslo_vmware.api [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789103, 'name': PowerOffVM_Task, 'duration_secs': 0.241303} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.705026] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1851.705026] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1851.705026] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b308be6-e6f6-47e5-94aa-39da3c42fd73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.778428] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1851.778708] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1851.778933] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleting the datastore file [datastore1] b9e8af08-9579-4dbf-8ea1-35ffab75e159 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1851.779159] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41413e7d-e872-43cd-91c1-3ec634c3e7da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.789502] env[62816]: DEBUG oslo_vmware.api [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1851.789502] env[62816]: value = "task-1789106" [ 1851.789502] env[62816]: _type = "Task" [ 1851.789502] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.797741] env[62816]: DEBUG oslo_vmware.api [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789106, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.000635] env[62816]: DEBUG nova.compute.utils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1852.176755] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789104, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.180369] env[62816]: DEBUG nova.compute.manager [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Stashing vm_state: stopped {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1852.299458] env[62816]: DEBUG oslo_vmware.api [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789106, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.480669} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.299732] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1852.299929] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1852.301404] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1852.301634] env[62816]: INFO nova.compute.manager [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1852.301893] env[62816]: DEBUG oslo.service.loopingcall [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.302109] env[62816]: DEBUG nova.compute.manager [-] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1852.302227] env[62816]: DEBUG nova.network.neutron [-] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1852.503686] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.671610] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528293} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.671610] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d/aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1852.671610] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1852.671610] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-49e8cf3d-38d2-419e-9819-913386924970 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.678275] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1852.678275] env[62816]: value = "task-1789107" [ 1852.678275] env[62816]: _type = "Task" [ 1852.678275] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.691367] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789107, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.703474] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.703736] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.161566] env[62816]: DEBUG nova.compute.manager [req-71f453a2-7df0-44d3-9c88-ad8cb525d9d4 req-2ba97085-af5d-4607-bd47-1d350ed50ad1 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Received event network-vif-deleted-3f45a830-39df-4031-a603-7b72a5562ec6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1853.161566] env[62816]: INFO nova.compute.manager [req-71f453a2-7df0-44d3-9c88-ad8cb525d9d4 req-2ba97085-af5d-4607-bd47-1d350ed50ad1 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Neutron deleted interface 3f45a830-39df-4031-a603-7b72a5562ec6; detaching it from the instance and deleting it from the info cache [ 1853.161860] env[62816]: DEBUG nova.network.neutron [req-71f453a2-7df0-44d3-9c88-ad8cb525d9d4 req-2ba97085-af5d-4607-bd47-1d350ed50ad1 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.198970] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789107, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078821} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.199439] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1853.200752] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e17b2a2-9db1-4454-8f4e-362199d0e3c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.210581] env[62816]: INFO nova.compute.claims [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1853.254695] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d/aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1853.261188] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2722087b-d38c-49d6-add7-b82a7c4b473f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.289949] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.290354] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.300860] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1853.300860] env[62816]: value = "task-1789108" [ 1853.300860] env[62816]: _type = "Task" [ 1853.300860] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.313271] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789108, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.359040] env[62816]: DEBUG nova.network.neutron [-] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.576645] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.576959] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.577193] env[62816]: INFO nova.compute.manager [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Attaching volume 8de9eb81-0f36-4069-9c8b-8bf875c75c98 to /dev/sdb [ 1853.621025] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099a00d8-2961-4f3c-8602-98b3963ac20a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.627717] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70738fd3-d7e8-453a-8cfd-ad9643997d58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.644935] env[62816]: DEBUG nova.virt.block_device [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating existing volume attachment record: 299053fa-4d1b-4c15-9efd-c6ff824e794a {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1853.665596] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b7e0698-df40-473f-9a7a-1dfa224dfbe4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.675033] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0cc6a1-1219-42c6-8e3c-75953505a2f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.704206] env[62816]: DEBUG nova.compute.manager [req-71f453a2-7df0-44d3-9c88-ad8cb525d9d4 req-2ba97085-af5d-4607-bd47-1d350ed50ad1 service nova] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Detach interface failed, port_id=3f45a830-39df-4031-a603-7b72a5562ec6, reason: Instance b9e8af08-9579-4dbf-8ea1-35ffab75e159 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1853.708849] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.709104] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.736883] env[62816]: INFO nova.compute.resource_tracker [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating resource usage from migration 9c3bf0c0-67f1-4b43-8a77-c2212e13f9e5 [ 1853.795850] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1853.809545] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.862621] env[62816]: INFO nova.compute.manager [-] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Took 1.56 seconds to deallocate network for instance. 
[ 1853.919147] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a438bb95-9b51-41c9-a417-67564d841820 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.929265] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7990713-82fe-44dd-8ddb-bebe6fd7e77d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.962473] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d02029-7749-41dc-a99d-6a7b32711add {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.970279] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca72af74-7a5e-4193-b296-24f59da89e23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.985920] env[62816]: DEBUG nova.compute.provider_tree [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1854.211951] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1854.315573] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789108, 'name': ReconfigVM_Task, 'duration_secs': 0.633692} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.315865] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Reconfigured VM instance instance-0000005e to attach disk [datastore1] aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d/aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1854.316694] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2dea4844-b9ba-44de-8922-6a8edaf83cd6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.319326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.324545] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1854.324545] env[62816]: value = "task-1789110" [ 1854.324545] env[62816]: _type = "Task" [ 1854.324545] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.332936] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789110, 'name': Rename_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.372471] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.489771] env[62816]: DEBUG nova.scheduler.client.report [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1854.732898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.834314] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789110, 'name': Rename_Task, 'duration_secs': 0.173852} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.834601] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1854.834847] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5d7a857-26c8-4325-a2b8-cba2f19255c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.841549] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1854.841549] env[62816]: value = "task-1789111" [ 1854.841549] env[62816]: _type = "Task" [ 1854.841549] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.849068] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789111, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.996592] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.293s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.996772] env[62816]: INFO nova.compute.manager [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Migrating [ 1855.003481] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.684s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1855.005056] env[62816]: INFO nova.compute.claims [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1855.353408] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789111, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.518061] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1855.518413] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1855.518704] env[62816]: DEBUG nova.network.neutron [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1855.853621] env[62816]: DEBUG oslo_vmware.api [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789111, 'name': PowerOnVM_Task, 'duration_secs': 0.543658} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.854145] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1855.854145] env[62816]: INFO nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Took 8.60 seconds to spawn the instance on the hypervisor. [ 1855.854318] env[62816]: DEBUG nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1855.855097] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a825e3-21fa-49bd-b36f-dd5c2a241e0c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.192021] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81606858-c813-41aa-ad71-e80eae27eea6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.204172] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de179bee-6ccc-42ad-9ff4-45f985743551 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.235267] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40bb5be8-0dd0-4471-a2e8-05563c118663 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.243034] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702bccd1-fdd8-4576-a6cd-907906097cb1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.256224] env[62816]: DEBUG nova.compute.provider_tree [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1856.363996] env[62816]: DEBUG nova.network.neutron [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1856.372241] env[62816]: INFO nova.compute.manager [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Took 14.25 seconds to build instance. [ 1856.760048] env[62816]: DEBUG nova.scheduler.client.report [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1856.870847] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1856.878262] env[62816]: DEBUG oslo_concurrency.lockutils [None req-923a6301-0210-4ca9-921e-95b216842dab tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.767s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.130157] env[62816]: DEBUG nova.compute.manager [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1857.130418] env[62816]: DEBUG nova.compute.manager [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing instance network info cache due to event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1857.130680] env[62816]: DEBUG oslo_concurrency.lockutils [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.130849] env[62816]: DEBUG oslo_concurrency.lockutils [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.131074] env[62816]: DEBUG nova.network.neutron [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1857.264765] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.267619] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1857.270608] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.898s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.270846] env[62816]: DEBUG nova.objects.instance [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'resources' on Instance uuid b9e8af08-9579-4dbf-8ea1-35ffab75e159 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1857.780692] env[62816]: DEBUG nova.compute.utils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1857.784776] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1857.784776] env[62816]: DEBUG nova.network.neutron [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1857.824185] env[62816]: DEBUG nova.policy [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a1765ada82a4b8e8ded3bcc5c92e181', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b4ced24cf2a4d88ba462e2f9685af14', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1857.859160] env[62816]: DEBUG nova.network.neutron [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updated VIF entry in instance network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1857.859160] env[62816]: DEBUG nova.network.neutron [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.955141] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3317b2fb-3d6f-49d3-91af-7c929fb34e77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.963077] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-70d425dd-d031-4c50-b724-a24f1373e8f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.996810] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17a2974-c314-4237-a76a-8bfa03084717 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.005397] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8062ae8-dc78-4405-a9bb-efe548e6b379 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.019853] env[62816]: DEBUG nova.compute.provider_tree [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1858.030093] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.030332] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.148656] env[62816]: DEBUG nova.network.neutron [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Successfully created port: e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1858.201992] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1858.202292] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371177', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'name': 'volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'serial': '8de9eb81-0f36-4069-9c8b-8bf875c75c98'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1858.203209] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db6bc9d-ce29-4863-90f4-e541e204fbe6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.219860] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a5131b-86f2-4f87-a8d6-8b86f6a0cc56 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.243933] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98/volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1858.244270] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98eb8e3b-b339-4e43-a3c9-e600cf36cae5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.261776] env[62816]: DEBUG oslo_vmware.api [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1858.261776] env[62816]: value = "task-1789113" [ 1858.261776] env[62816]: _type = "Task" [ 1858.261776] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.269707] env[62816]: DEBUG oslo_vmware.api [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789113, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.288468] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1858.363545] env[62816]: DEBUG oslo_concurrency.lockutils [req-e57616af-a629-4536-aac2-80b99d3ffc60 req-ea96d4b8-5142-420b-afec-a29be5b6c6df service nova] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.384812] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f86b066f-d032-47da-88ce-5cb24e476159 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.405757] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1858.524025] env[62816]: DEBUG nova.scheduler.client.report [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1858.532760] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1858.771952] env[62816]: DEBUG oslo_vmware.api [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789113, 'name': ReconfigVM_Task, 'duration_secs': 0.340888} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.772313] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98/volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1858.776922] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70e674e2-09cb-4c68-b80e-890ec5e82bf2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.795452] env[62816]: DEBUG oslo_vmware.api [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1858.795452] env[62816]: value = "task-1789114" [ 1858.795452] env[62816]: _type = "Task" [ 1858.795452] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.803544] env[62816]: DEBUG oslo_vmware.api [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789114, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.911445] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1858.911708] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c8a0f3e9-39d6-4416-8ada-5ab62c6a30c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.918222] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1858.918222] env[62816]: value = "task-1789115" [ 1858.918222] env[62816]: _type = "Task" [ 1858.918222] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.926059] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789115, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.027591] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.757s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.030803] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.297s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.032530] env[62816]: INFO nova.compute.claims [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1859.049936] env[62816]: INFO nova.scheduler.client.report [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleted allocations for instance b9e8af08-9579-4dbf-8ea1-35ffab75e159 [ 1859.051532] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.300025] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1859.311299] env[62816]: DEBUG oslo_vmware.api [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789114, 'name': ReconfigVM_Task, 'duration_secs': 0.131682} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.312051] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371177', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'name': 'volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'serial': '8de9eb81-0f36-4069-9c8b-8bf875c75c98'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1859.324843] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1859.325664] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1859.325664] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1859.325664] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1859.325664] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1859.325858] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1859.325926] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1859.326134] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1859.326461] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1859.326697] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1859.326901] env[62816]: DEBUG nova.virt.hardware [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1859.327958] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12581ebe-0d64-46f6-b660-13cbab3cb9db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.336704] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e12f0a98-cffa-47f8-a66e-7a3abcbdc4e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.430105] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1859.430383] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1859.558752] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18a1133d-d3a0-4ec3-bea1-9eb9256ecd69 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "b9e8af08-9579-4dbf-8ea1-35ffab75e159" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.390s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.586123] env[62816]: DEBUG 
nova.compute.manager [req-082ce401-1ed3-44e3-b262-3a292d7a283f req-14af98ea-aab3-4c28-8b2e-ccfd144aad98 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Received event network-vif-plugged-e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1859.586123] env[62816]: DEBUG oslo_concurrency.lockutils [req-082ce401-1ed3-44e3-b262-3a292d7a283f req-14af98ea-aab3-4c28-8b2e-ccfd144aad98 service nova] Acquiring lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.586123] env[62816]: DEBUG oslo_concurrency.lockutils [req-082ce401-1ed3-44e3-b262-3a292d7a283f req-14af98ea-aab3-4c28-8b2e-ccfd144aad98 service nova] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.586123] env[62816]: DEBUG oslo_concurrency.lockutils [req-082ce401-1ed3-44e3-b262-3a292d7a283f req-14af98ea-aab3-4c28-8b2e-ccfd144aad98 service nova] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.586123] env[62816]: DEBUG nova.compute.manager [req-082ce401-1ed3-44e3-b262-3a292d7a283f req-14af98ea-aab3-4c28-8b2e-ccfd144aad98 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] No waiting events found dispatching network-vif-plugged-e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1859.586942] env[62816]: WARNING nova.compute.manager [req-082ce401-1ed3-44e3-b262-3a292d7a283f req-14af98ea-aab3-4c28-8b2e-ccfd144aad98 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Received unexpected event network-vif-plugged-e9b1f47c-7b77-4707-a285-7130979eca9e for instance with vm_state building and task_state spawning. 
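The WARNING above is the tail of Nova's external-event handshake with Neutron: the build path registers a waiter for network-vif-plugged-<port_id> before plugging the VIF, and the incoming Neutron notification pops it; when the notification lands before any waiter has been registered (as here, while the instance is still building), the event is dropped as unexpected. A conceptual sketch of that pop-or-warn behaviour, using only the standard library; the class shares its name with Nova's InstanceEvents but the methods are simplified, not the real implementation:

import threading

class InstanceEvents:
    """Toy model of the waiter registry behind the log lines above."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}                      # event name -> threading.Event

    def prepare_for_event(self, name):
        """Called by the build path before the action that triggers the event."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[name] = waiter
        return waiter

    def pop_instance_event(self, name):
        """Called when the external notification arrives."""
        with self._lock:
            waiter = self._waiters.pop(name, None)
        if waiter is None:
            # Nobody is waiting yet: the "Received unexpected event" case above.
            print(f"WARNING: unexpected event {name}")
            return
        waiter.set()                            # unblocks the waiting build thread

events = InstanceEvents()
# The notification arrives before a waiter was registered -> logged as unexpected.
events.pop_instance_event("network-vif-plugged-e9b1f47c-7b77-4707-a285-7130979eca9e")

In the log this is harmless: the build path simply proceeds and later confirms the port directly ("Successfully updated port" in the next record), rather than blocking on an event that has already been consumed.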
[ 1859.661870] env[62816]: DEBUG nova.network.neutron [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Successfully updated port: e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1859.937247] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1859.937522] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1859.937661] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1859.937844] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1859.937991] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1859.938161] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1859.938363] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1859.938527] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1859.938696] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1859.938864] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1859.939050] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1859.944252] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28834535-c1af-4458-9cc7-ff77d4f215b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.961268] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1859.961268] env[62816]: value = "task-1789116" [ 1859.961268] env[62816]: _type = "Task" [ 1859.961268] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.969440] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789116, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.166698] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.166698] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquired lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.166698] env[62816]: DEBUG nova.network.neutron [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.201095] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9131ae-b85f-431e-8e82-d9d3c86baf35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.210423] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-859db0e3-aa34-4adf-88d5-8a9e5f25716c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.241465] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f7f623-4b64-4fab-9a06-4ef853c578b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.249973] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caefa351-c154-42c5-83c2-056b5d155a45 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.265690] env[62816]: DEBUG nova.compute.provider_tree [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1860.347471] env[62816]: DEBUG nova.objects.instance [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.471383] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789116, 'name': ReconfigVM_Task, 'duration_secs': 0.164983} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.471731] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1860.701701] env[62816]: DEBUG nova.network.neutron [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1860.769265] env[62816]: DEBUG nova.scheduler.client.report [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1860.849783] env[62816]: DEBUG nova.network.neutron [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updating instance_info_cache with network_info: [{"id": "e9b1f47c-7b77-4707-a285-7130979eca9e", "address": "fa:16:3e:ca:54:21", "network": {"id": "7a6b749a-f038-4d2e-b502-671925db60d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1409965876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b4ced24cf2a4d88ba462e2f9685af14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9b1f47c-7b", "ovs_interfaceid": "e9b1f47c-7b77-4707-a285-7130979eca9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.853386] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f48967b-cddc-41b6-ac7d-47d40fe81205 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.276s {{(pid=62816) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.978582] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1860.978846] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1860.979022] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1860.979242] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1860.979451] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1860.979620] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1860.979829] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1860.979990] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1860.980179] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1860.980353] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1860.980531] env[62816]: DEBUG nova.virt.hardware [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1860.985865] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Reconfiguring VM instance instance-00000039 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1860.986159] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bb74440-cdbd-424b-a31d-a4aa60e9e8a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.005441] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1861.005441] env[62816]: value = "task-1789118" [ 1861.005441] env[62816]: _type = "Task" [ 1861.005441] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.013302] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789118, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.275026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.275370] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1861.278403] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.227s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.280052] env[62816]: INFO nova.compute.claims [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1861.355186] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Releasing lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.355520] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Instance network_info: |[{"id": "e9b1f47c-7b77-4707-a285-7130979eca9e", "address": "fa:16:3e:ca:54:21", "network": {"id": "7a6b749a-f038-4d2e-b502-671925db60d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1409965876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b4ced24cf2a4d88ba462e2f9685af14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9b1f47c-7b", "ovs_interfaceid": "e9b1f47c-7b77-4707-a285-7130979eca9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1861.356158] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:54:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57d4be17-536f-4a81-bea9-6547bd50f4a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e9b1f47c-7b77-4707-a285-7130979eca9e', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1861.363740] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 
tempest-ServersTestJSON-1116385564-project-member] Creating folder: Project (1b4ced24cf2a4d88ba462e2f9685af14). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1861.364014] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17003af9-2274-4135-ac43-654cdfed845c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.374795] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Created folder: Project (1b4ced24cf2a4d88ba462e2f9685af14) in parent group-v370905. [ 1861.374986] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Creating folder: Instances. Parent ref: group-v371178. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1861.375228] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c46f39b-870c-4b00-b7c4-149f1057ac20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.384173] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Created folder: Instances in parent group-v371178. [ 1861.384400] env[62816]: DEBUG oslo.service.loopingcall [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.384595] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1861.384782] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3445c0a8-d145-40d5-b6aa-f5aca50d5c3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.403558] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1861.403558] env[62816]: value = "task-1789121" [ 1861.403558] env[62816]: _type = "Task" [ 1861.403558] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.415021] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789121, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.515494] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789118, 'name': ReconfigVM_Task, 'duration_secs': 0.196077} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.515796] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Reconfigured VM instance instance-00000039 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1861.516613] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f375db3c-92e7-471f-96a3-aba2bd48b05e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.539595] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c/9745413b-2bd8-45d7-8491-483e4921b59c.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1861.539869] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e37660e-0da1-4a76-90c4-e449b23aa56b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.557791] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1861.557791] env[62816]: value = "task-1789122" [ 1861.557791] env[62816]: _type = "Task" [ 1861.557791] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.565444] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789122, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.613212] env[62816]: DEBUG nova.compute.manager [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Received event network-changed-e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1861.613417] env[62816]: DEBUG nova.compute.manager [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Refreshing instance network info cache due to event network-changed-e9b1f47c-7b77-4707-a285-7130979eca9e. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1861.613654] env[62816]: DEBUG oslo_concurrency.lockutils [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] Acquiring lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.613803] env[62816]: DEBUG oslo_concurrency.lockutils [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] Acquired lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.613974] env[62816]: DEBUG nova.network.neutron [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Refreshing network info cache for port e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.731024] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.731312] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.784876] env[62816]: DEBUG nova.compute.utils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1861.788160] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1861.788301] env[62816]: DEBUG nova.network.neutron [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1861.848188] env[62816]: DEBUG nova.policy [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5ffca35ab8614990be3ff2c9697d424f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef0dee852154407fa3201a860c55bf3c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1861.913075] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789121, 'name': CreateVM_Task, 'duration_secs': 0.337782} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.913263] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1861.913918] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.914108] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.914436] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1861.914692] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df7ec338-2e8f-4d00-8b05-cf0239c6f8b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.919359] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1861.919359] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52105205-9f1c-f182-f8b7-ee6133950bff" [ 1861.919359] env[62816]: _type = "Task" [ 1861.919359] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.926711] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52105205-9f1c-f182-f8b7-ee6133950bff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.067940] env[62816]: DEBUG oslo_vmware.api [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789122, 'name': ReconfigVM_Task, 'duration_secs': 0.309453} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.068270] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c/9745413b-2bd8-45d7-8491-483e4921b59c.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1862.068596] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1862.135253] env[62816]: DEBUG nova.network.neutron [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Successfully created port: 102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1862.235218] env[62816]: DEBUG nova.compute.utils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1862.289928] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1862.341128] env[62816]: DEBUG nova.network.neutron [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updated VIF entry in instance network info cache for port e9b1f47c-7b77-4707-a285-7130979eca9e. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.341128] env[62816]: DEBUG nova.network.neutron [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updating instance_info_cache with network_info: [{"id": "e9b1f47c-7b77-4707-a285-7130979eca9e", "address": "fa:16:3e:ca:54:21", "network": {"id": "7a6b749a-f038-4d2e-b502-671925db60d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1409965876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b4ced24cf2a4d88ba462e2f9685af14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9b1f47c-7b", "ovs_interfaceid": "e9b1f47c-7b77-4707-a285-7130979eca9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.435931] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52105205-9f1c-f182-f8b7-ee6133950bff, 'name': SearchDatastore_Task, 'duration_secs': 0.009523} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.438391] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.438735] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1862.439085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.439351] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.439650] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1862.440739] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb062631-c2a1-46ec-ad0c-142d6dad9526 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.448669] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1862.448861] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1862.451517] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75ba54f3-b6f3-4f96-98ad-45b53ac5b1a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.456594] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1862.456594] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207a83c-6451-539b-d576-0f1008085f20" [ 1862.456594] env[62816]: _type = "Task" [ 1862.456594] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.463951] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5207a83c-6451-539b-d576-0f1008085f20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.474824] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d4fb8d-04e3-453e-ad34-9c8ba50960a3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.480994] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f218c3-43c2-4116-bb8f-c03823af852c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.510963] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd31c11-c1cb-49bb-8375-20bc1bc480aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.517738] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e463210-0509-4ea0-be5e-74a31e7a22da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.530565] env[62816]: DEBUG nova.compute.provider_tree [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1862.575121] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a0bc49-2eec-4007-ba82-29aefe1e1190 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.594424] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da0acdcd-2bb3-4451-880d-1e1072069e64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.612826] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] 
[instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1862.738671] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1862.843930] env[62816]: DEBUG oslo_concurrency.lockutils [req-85f1d81e-0715-4b2f-815d-49ff0b13f2ff req-d7a523ea-dee6-4b5d-87ca-b5d256903337 service nova] Releasing lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.967683] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5207a83c-6451-539b-d576-0f1008085f20, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.968502] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeb5297d-5c07-4f4c-b6ed-31bf16522fd5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.973802] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1862.973802] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52dd3e7f-9b85-f0f5-656b-bb89620e6956" [ 1862.973802] env[62816]: _type = "Task" [ 1862.973802] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.981819] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd3e7f-9b85-f0f5-656b-bb89620e6956, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.034175] env[62816]: DEBUG nova.scheduler.client.report [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1863.151576] env[62816]: DEBUG nova.network.neutron [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Port c924d6c0-d5cc-40a9-b561-9393a5f71201 binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1863.302094] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1863.329413] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1863.329693] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1863.329857] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1863.330067] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 
tempest-AttachVolumeNegativeTest-1607224647-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1863.330214] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1863.330369] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1863.330612] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1863.330732] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1863.330899] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1863.331198] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1863.331442] env[62816]: DEBUG nova.virt.hardware [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1863.332536] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b7e1f0-0e8a-4688-9bab-39a5110407fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.340859] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd99a087-e119-46bd-891a-c14b73c45b81 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.484203] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52dd3e7f-9b85-f0f5-656b-bb89620e6956, 'name': SearchDatastore_Task, 'duration_secs': 0.009847} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.484553] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.484788] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 37cb03ea-2e94-4466-89c0-2e3f7fdac076/37cb03ea-2e94-4466-89c0-2e3f7fdac076.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1863.484971] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34eadd88-2d84-439e-a47a-743c7de3bb9e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.491550] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1863.491550] env[62816]: value = "task-1789123" [ 1863.491550] env[62816]: _type = "Task" [ 1863.491550] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.499336] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789123, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.539415] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.540046] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1863.642931] env[62816]: DEBUG nova.compute.manager [req-0aa6c781-0db8-435b-8824-58237834dd3b req-f52eebb3-af1c-417e-a16e-80ba0d037579 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Received event network-vif-plugged-102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1863.643176] env[62816]: DEBUG oslo_concurrency.lockutils [req-0aa6c781-0db8-435b-8824-58237834dd3b req-f52eebb3-af1c-417e-a16e-80ba0d037579 service nova] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.643391] env[62816]: DEBUG oslo_concurrency.lockutils [req-0aa6c781-0db8-435b-8824-58237834dd3b req-f52eebb3-af1c-417e-a16e-80ba0d037579 service nova] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.643563] env[62816]: DEBUG oslo_concurrency.lockutils [req-0aa6c781-0db8-435b-8824-58237834dd3b req-f52eebb3-af1c-417e-a16e-80ba0d037579 service nova] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.643732] env[62816]: DEBUG nova.compute.manager [req-0aa6c781-0db8-435b-8824-58237834dd3b req-f52eebb3-af1c-417e-a16e-80ba0d037579 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] No waiting events found dispatching network-vif-plugged-102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1863.643902] env[62816]: WARNING nova.compute.manager [req-0aa6c781-0db8-435b-8824-58237834dd3b req-f52eebb3-af1c-417e-a16e-80ba0d037579 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Received unexpected event network-vif-plugged-102f4b78-99cb-46f4-9305-2bec7ba02d1d for instance with vm_state building and task_state spawning. 
[ 1863.733106] env[62816]: DEBUG nova.network.neutron [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Successfully updated port: 102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1863.801991] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1863.802783] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1863.803067] env[62816]: INFO nova.compute.manager [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Attaching volume 3a5017ec-1e99-4460-aaa9-52093148a0df to /dev/sdc [ 1863.838985] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8343985-5c7e-49d4-a21d-1103d77894e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.847660] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e949f949-c751-408c-9fc9-f240d060d40d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.862665] env[62816]: DEBUG nova.virt.block_device [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating existing volume attachment record: f3060278-b4f2-44c7-bb81-e9ee159861d8 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1864.001590] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789123, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460406} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.001854] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 37cb03ea-2e94-4466-89c0-2e3f7fdac076/37cb03ea-2e94-4466-89c0-2e3f7fdac076.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1864.002282] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1864.002391] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-84983c9e-cdcc-464d-8e15-5821001c73d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.009592] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1864.009592] env[62816]: value = "task-1789124" [ 1864.009592] env[62816]: _type = "Task" [ 1864.009592] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.017275] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789124, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.047023] env[62816]: DEBUG nova.compute.utils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1864.048591] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1864.048806] env[62816]: DEBUG nova.network.neutron [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1864.115283] env[62816]: DEBUG nova.policy [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1864.176303] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.176566] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.176756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.236283] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.236531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.236632] env[62816]: DEBUG nova.network.neutron [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 
tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1864.410181] env[62816]: DEBUG nova.network.neutron [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Successfully created port: 951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1864.520461] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789124, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066927} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.520779] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1864.521605] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c4ce3d-a1ba-42d9-b980-5b743160aac8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.543547] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 37cb03ea-2e94-4466-89c0-2e3f7fdac076/37cb03ea-2e94-4466-89c0-2e3f7fdac076.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1864.543819] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ba438bd-bb64-42b9-b64c-e20890936ef1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.560798] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1864.572855] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1864.572855] env[62816]: value = "task-1789126" [ 1864.572855] env[62816]: _type = "Task" [ 1864.572855] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.582230] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789126, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.783389] env[62816]: DEBUG nova.network.neutron [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1864.960552] env[62816]: DEBUG nova.network.neutron [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updating instance_info_cache with network_info: [{"id": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "address": "fa:16:3e:d4:14:13", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102f4b78-99", "ovs_interfaceid": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.082963] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789126, 'name': ReconfigVM_Task, 'duration_secs': 0.287901} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.083821] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 37cb03ea-2e94-4466-89c0-2e3f7fdac076/37cb03ea-2e94-4466-89c0-2e3f7fdac076.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1865.083887] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d6467f1-a3f1-42c3-9d35-af1dce382ce7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.090704] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1865.090704] env[62816]: value = "task-1789127" [ 1865.090704] env[62816]: _type = "Task" [ 1865.090704] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.100028] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789127, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.211234] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.211503] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.211695] env[62816]: DEBUG nova.network.neutron [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.228067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.228316] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.228523] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.228708] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.228879] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.230906] env[62816]: INFO nova.compute.manager [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Terminating instance [ 1865.232772] env[62816]: DEBUG nova.compute.manager [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1865.232973] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1865.233829] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22942970-96df-444b-ac03-c0c77b5446f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.242351] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1865.242561] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b866bf9-acde-48b5-9d0d-161ec44caa27 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.249583] env[62816]: DEBUG oslo_vmware.api [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1865.249583] env[62816]: value = "task-1789128" [ 1865.249583] env[62816]: _type = "Task" [ 1865.249583] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.257806] env[62816]: DEBUG oslo_vmware.api [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789128, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.463380] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.463762] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Instance network_info: |[{"id": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "address": "fa:16:3e:d4:14:13", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102f4b78-99", "ovs_interfaceid": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1865.464285] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:14:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '32028d02-abaa-4071-bc65-1460f5c772a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '102f4b78-99cb-46f4-9305-2bec7ba02d1d', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1865.472118] env[62816]: DEBUG oslo.service.loopingcall [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.472354] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1865.472644] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c6de9eb-8c30-4202-a0a6-bf9373afbdc5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.492860] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1865.492860] env[62816]: value = "task-1789129" [ 1865.492860] env[62816]: _type = "Task" [ 1865.492860] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.504888] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789129, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.571125] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1865.597033] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1865.597296] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1865.597457] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1865.597640] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1865.597787] env[62816]: DEBUG 
nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1865.597934] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1865.598159] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1865.598325] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1865.598494] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1865.598660] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1865.598899] env[62816]: DEBUG nova.virt.hardware [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1865.599685] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef151c82-fc6b-460b-8a7e-4a4e89006bd0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.610255] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789127, 'name': Rename_Task, 'duration_secs': 0.136132} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.611436] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243f0153-65e5-47dc-bed6-dd90c78225d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.614988] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1865.615227] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d65d4ab-598f-4698-a6b4-60ad494ad02d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.627877] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1865.627877] env[62816]: value = "task-1789130" [ 1865.627877] env[62816]: _type = "Task" [ 1865.627877] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.638578] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789130, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.670374] env[62816]: DEBUG nova.compute.manager [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Received event network-changed-102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1865.670554] env[62816]: DEBUG nova.compute.manager [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Refreshing instance network info cache due to event network-changed-102f4b78-99cb-46f4-9305-2bec7ba02d1d. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1865.670717] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] Acquiring lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.670866] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] Acquired lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.671045] env[62816]: DEBUG nova.network.neutron [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Refreshing network info cache for port 102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1865.760583] env[62816]: DEBUG oslo_vmware.api [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789128, 'name': PowerOffVM_Task, 'duration_secs': 0.212963} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.760931] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1865.761199] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1865.761511] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ede15912-9ae0-4d8e-b07b-731c4cfb333c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.855095] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1865.855274] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1865.855463] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleting the datastore file 
[datastore1] 543d69d2-0694-4d57-bbae-f8851ff0f0dc {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1865.855795] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dbfe2868-a9e9-448a-bdd9-11888ac33bff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.862415] env[62816]: DEBUG oslo_vmware.api [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1865.862415] env[62816]: value = "task-1789132" [ 1865.862415] env[62816]: _type = "Task" [ 1865.862415] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.870267] env[62816]: DEBUG oslo_vmware.api [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.940266] env[62816]: DEBUG nova.network.neutron [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.976388] env[62816]: DEBUG nova.network.neutron [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Successfully updated port: 951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1866.004688] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789129, 'name': CreateVM_Task, 'duration_secs': 0.387354} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.005040] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1866.005822] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.006137] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.006555] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1866.006918] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ae91477-63ab-45e2-aaad-182468f0ccce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.012245] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1866.012245] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5207fd36-973d-b1bf-1ec3-34f3175dbba0" [ 1866.012245] env[62816]: _type = "Task" [ 1866.012245] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.021266] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5207fd36-973d-b1bf-1ec3-34f3175dbba0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.140043] env[62816]: DEBUG oslo_vmware.api [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789130, 'name': PowerOnVM_Task, 'duration_secs': 0.470829} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.140043] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1866.140043] env[62816]: INFO nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Took 6.84 seconds to spawn the instance on the hypervisor. [ 1866.140043] env[62816]: DEBUG nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1866.140043] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c72fd0-dc8d-4db2-8421-ee8b070e2e6c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.372126] env[62816]: DEBUG oslo_vmware.api [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192741} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.372416] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1866.372624] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1866.372811] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1866.372985] env[62816]: INFO nova.compute.manager [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1866.373251] env[62816]: DEBUG oslo.service.loopingcall [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.373445] env[62816]: DEBUG nova.compute.manager [-] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1866.373963] env[62816]: DEBUG nova.network.neutron [-] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1866.433336] env[62816]: DEBUG nova.network.neutron [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updated VIF entry in instance network info cache for port 102f4b78-99cb-46f4-9305-2bec7ba02d1d. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1866.433722] env[62816]: DEBUG nova.network.neutron [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updating instance_info_cache with network_info: [{"id": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "address": "fa:16:3e:d4:14:13", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102f4b78-99", "ovs_interfaceid": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.443290] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.482044] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.482280] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.482432] env[62816]: DEBUG nova.network.neutron [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1866.522896] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5207fd36-973d-b1bf-1ec3-34f3175dbba0, 'name': SearchDatastore_Task, 'duration_secs': 0.009838} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.523264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.523592] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1866.523857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.524019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.524212] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.524471] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-caa03cad-1e1c-40e7-bfb4-dfd325826882 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.532965] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.533203] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1866.533952] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5fd9a80-230a-48b1-97ab-b68b6fafa3d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.539364] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1866.539364] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5267d754-c733-f6e7-6369-2061ef6fd3c4" [ 1866.539364] env[62816]: _type = "Task" [ 1866.539364] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.547690] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5267d754-c733-f6e7-6369-2061ef6fd3c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.659412] env[62816]: INFO nova.compute.manager [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Took 12.36 seconds to build instance. 
[ 1866.701617] env[62816]: DEBUG nova.compute.manager [req-75297205-d7f0-4d43-96c6-bd095e989900 req-a609f010-b26f-47cd-a042-764a83d6a719 service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Received event network-vif-deleted-7a7060d4-14aa-43c8-9359-52512eee6df8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1866.701817] env[62816]: INFO nova.compute.manager [req-75297205-d7f0-4d43-96c6-bd095e989900 req-a609f010-b26f-47cd-a042-764a83d6a719 service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Neutron deleted interface 7a7060d4-14aa-43c8-9359-52512eee6df8; detaching it from the instance and deleting it from the info cache [ 1866.701985] env[62816]: DEBUG nova.network.neutron [req-75297205-d7f0-4d43-96c6-bd095e989900 req-a609f010-b26f-47cd-a042-764a83d6a719 service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.936705] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3a4a94c-dddc-4f6e-9c29-1373ca34d6fa req-c3c6cbdf-e1fb-45e1-a109-95c1f69d2e5b service nova] Releasing lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.973435] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b2fb0e-f588-4b76-8209-d78b6badd01f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.001632] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8c9322-cc89-4159-9ce4-463a2111b0ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.011721] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1867.038060] env[62816]: DEBUG nova.network.neutron [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1867.049285] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5267d754-c733-f6e7-6369-2061ef6fd3c4, 'name': SearchDatastore_Task, 'duration_secs': 0.008974} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.050074] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-789dc5ba-b280-4556-b295-6ddb42b1d6af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.055092] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1867.055092] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a98c33-2e5d-70f8-30ea-daebcf178961" [ 1867.055092] env[62816]: _type = "Task" [ 1867.055092] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.064188] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a98c33-2e5d-70f8-30ea-daebcf178961, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.161886] env[62816]: DEBUG oslo_concurrency.lockutils [None req-dda0d1ee-1d79-441b-9c2a-7caeb5ac78c0 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.871s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.178209] env[62816]: DEBUG nova.network.neutron [-] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.204794] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8d747b4-d3cb-453c-85f1-09c89b32538a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.208540] env[62816]: DEBUG nova.network.neutron [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.215580] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4f9b98-16f6-432f-8b4e-d821137b8525 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.246430] env[62816]: DEBUG nova.compute.manager [req-75297205-d7f0-4d43-96c6-bd095e989900 req-a609f010-b26f-47cd-a042-764a83d6a719 service nova] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Detach interface failed, port_id=7a7060d4-14aa-43c8-9359-52512eee6df8, reason: Instance 543d69d2-0694-4d57-bbae-f8851ff0f0dc could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1867.521227] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d29961bb-6a64-4cfd-a869-e941a3f16693 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance '9745413b-2bd8-45d7-8491-483e4921b59c' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1867.565822] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a98c33-2e5d-70f8-30ea-daebcf178961, 'name': SearchDatastore_Task, 'duration_secs': 0.010122} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.566106] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.566386] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 6f0c72ab-1eaf-4db5-842f-b0ba75739e66/6f0c72ab-1eaf-4db5-842f-b0ba75739e66.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1867.566673] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd819c91-41e5-4fc2-a0bb-0d78a48f95c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.574899] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1867.574899] env[62816]: value = "task-1789134" [ 1867.574899] env[62816]: _type = "Task" [ 1867.574899] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.583815] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789134, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.680908] env[62816]: INFO nova.compute.manager [-] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Took 1.31 seconds to deallocate network for instance. 
[ 1867.696499] env[62816]: DEBUG nova.compute.manager [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-vif-plugged-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1867.696702] env[62816]: DEBUG oslo_concurrency.lockutils [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.696954] env[62816]: DEBUG oslo_concurrency.lockutils [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.697160] env[62816]: DEBUG oslo_concurrency.lockutils [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.697354] env[62816]: DEBUG nova.compute.manager [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] No waiting events found dispatching network-vif-plugged-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1867.697534] env[62816]: WARNING nova.compute.manager [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received unexpected event network-vif-plugged-951e7a24-5179-43e6-b530-4769ba0ffdb4 for instance with vm_state building and task_state spawning. [ 1867.697702] env[62816]: DEBUG nova.compute.manager [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1867.697876] env[62816]: DEBUG nova.compute.manager [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing instance network info cache due to event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1867.698361] env[62816]: DEBUG oslo_concurrency.lockutils [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.711359] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.711746] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Instance network_info: |[{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1867.712074] env[62816]: DEBUG oslo_concurrency.lockutils [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.712306] env[62816]: DEBUG nova.network.neutron [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1867.713992] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:ac:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'951e7a24-5179-43e6-b530-4769ba0ffdb4', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1867.722869] env[62816]: DEBUG oslo.service.loopingcall [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1867.724304] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1867.724537] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ddb3733f-5abc-43d7-94ba-1728ebd96f62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.743897] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1867.743897] env[62816]: value = "task-1789135" [ 1867.743897] env[62816]: _type = "Task" [ 1867.743897] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.751517] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789135, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.085993] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789134, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.188380] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.188668] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.188906] env[62816]: DEBUG nova.objects.instance [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'resources' on Instance uuid 543d69d2-0694-4d57-bbae-f8851ff0f0dc {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.255935] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789135, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.412745] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Volume attach. Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1868.412994] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371181', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'name': 'volume-3a5017ec-1e99-4460-aaa9-52093148a0df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'serial': '3a5017ec-1e99-4460-aaa9-52093148a0df'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1868.413872] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b26627-b79a-43eb-bb2c-579c71fa4eb3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.432133] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b3989a-370e-4958-bde8-e08a177dade2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.459075] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] volume-3a5017ec-1e99-4460-aaa9-52093148a0df/volume-3a5017ec-1e99-4460-aaa9-52093148a0df.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1868.460084] env[62816]: DEBUG nova.network.neutron [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updated VIF entry in instance network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1868.460473] env[62816]: DEBUG nova.network.neutron [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.462278] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9324de22-43e1-49c3-b2cd-9b5e7d6bbb83 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.482012] env[62816]: DEBUG oslo_vmware.api [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1868.482012] env[62816]: value = "task-1789136" [ 1868.482012] env[62816]: _type = "Task" [ 1868.482012] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.490314] env[62816]: DEBUG oslo_vmware.api [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789136, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.586693] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789134, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530297} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.587040] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 6f0c72ab-1eaf-4db5-842f-b0ba75739e66/6f0c72ab-1eaf-4db5-842f-b0ba75739e66.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1868.587299] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1868.587606] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a10b05e4-38e2-43e1-aa7c-96d5c1c3ec3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.594656] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1868.594656] env[62816]: value = "task-1789137" [ 1868.594656] env[62816]: _type = "Task" [ 1868.594656] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.603639] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789137, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.727848] env[62816]: DEBUG nova.compute.manager [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Received event network-changed-e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1868.728126] env[62816]: DEBUG nova.compute.manager [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Refreshing instance network info cache due to event network-changed-e9b1f47c-7b77-4707-a285-7130979eca9e. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1868.728412] env[62816]: DEBUG oslo_concurrency.lockutils [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] Acquiring lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.728614] env[62816]: DEBUG oslo_concurrency.lockutils [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] Acquired lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.728851] env[62816]: DEBUG nova.network.neutron [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Refreshing network info cache for port e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1868.755203] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789135, 'name': CreateVM_Task, 'duration_secs': 0.634129} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.757485] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1868.758332] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.758469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.758778] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1868.759045] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-636f0894-7eae-4cc3-8d25-8e852d1c3630 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.763911] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1868.763911] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b9a741-ea54-6c6f-624e-d716207a7a55" [ 1868.763911] env[62816]: _type = "Task" [ 1868.763911] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.773373] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b9a741-ea54-6c6f-624e-d716207a7a55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.853865] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f70f53e-62f1-4fe8-9d7c-68e4aa2f5e73 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.861387] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b7d259-6bae-4601-84f5-b6fee500c4ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.893803] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886a9a7b-1e5a-48dd-868b-a41b79914611 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.901013] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f7d238-907c-4ce6-b9ec-16bc4c1fa82d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.914270] env[62816]: DEBUG nova.compute.provider_tree [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.977195] env[62816]: DEBUG oslo_concurrency.lockutils [req-e6eb2bc7-131a-459c-aed8-1f13196d64f5 req-a0fad010-0c3e-4166-bc30-375f55c42f42 service nova] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.991373] env[62816]: DEBUG oslo_vmware.api [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789136, 'name': ReconfigVM_Task, 'duration_secs': 0.360095} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.991691] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfigured VM instance instance-00000047 to attach disk [datastore1] volume-3a5017ec-1e99-4460-aaa9-52093148a0df/volume-3a5017ec-1e99-4460-aaa9-52093148a0df.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1868.996736] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d2e659c-9833-4617-a482-6b79664cc536 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.011669] env[62816]: DEBUG oslo_vmware.api [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1869.011669] env[62816]: value = "task-1789138" [ 1869.011669] env[62816]: _type = "Task" [ 1869.011669] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.019222] env[62816]: DEBUG oslo_vmware.api [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789138, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.104670] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789137, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074644} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.104896] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1869.105919] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143700d9-7ba6-4aad-9661-09f59c4c8b11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.128830] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 6f0c72ab-1eaf-4db5-842f-b0ba75739e66/6f0c72ab-1eaf-4db5-842f-b0ba75739e66.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1869.129527] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-254a613a-ebf3-47e4-9d50-bd5c6794af9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.150057] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1869.150057] env[62816]: value = "task-1789139" [ 1869.150057] env[62816]: _type = "Task" [ 1869.150057] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.159103] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789139, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.274422] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b9a741-ea54-6c6f-624e-d716207a7a55, 'name': SearchDatastore_Task, 'duration_secs': 0.010523} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.278049] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.278176] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1869.278422] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.278574] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.278753] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.283029] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b89bab86-19d8-4c1a-8224-d5f05e974162 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.294342] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.294538] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1869.295260] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cdf08c1-968e-4737-a357-2d12df0c5843 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.301622] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1869.301622] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e54a95-63d3-50e6-8fe0-39449206cf97" [ 1869.301622] env[62816]: _type = "Task" [ 1869.301622] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.310185] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e54a95-63d3-50e6-8fe0-39449206cf97, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.416750] env[62816]: DEBUG nova.scheduler.client.report [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1869.510645] env[62816]: DEBUG nova.network.neutron [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updated VIF entry in instance network info cache for port e9b1f47c-7b77-4707-a285-7130979eca9e. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1869.511032] env[62816]: DEBUG nova.network.neutron [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updating instance_info_cache with network_info: [{"id": "e9b1f47c-7b77-4707-a285-7130979eca9e", "address": "fa:16:3e:ca:54:21", "network": {"id": "7a6b749a-f038-4d2e-b502-671925db60d1", "bridge": "br-int", "label": "tempest-ServersTestJSON-1409965876-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b4ced24cf2a4d88ba462e2f9685af14", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57d4be17-536f-4a81-bea9-6547bd50f4a3", "external-id": "nsx-vlan-transportzone-163", "segmentation_id": 163, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape9b1f47c-7b", "ovs_interfaceid": "e9b1f47c-7b77-4707-a285-7130979eca9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.521580] env[62816]: DEBUG oslo_vmware.api [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789138, 'name': ReconfigVM_Task, 'duration_secs': 0.139637} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.521928] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371181', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'name': 'volume-3a5017ec-1e99-4460-aaa9-52093148a0df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'serial': '3a5017ec-1e99-4460-aaa9-52093148a0df'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1869.661664] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789139, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.783435] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.783730] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.784055] env[62816]: DEBUG nova.compute.manager [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Going to confirm migration 3 {{(pid=62816) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1869.813368] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e54a95-63d3-50e6-8fe0-39449206cf97, 'name': SearchDatastore_Task, 'duration_secs': 0.039014} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.814312] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-906413ef-a890-4e26-956d-0d2c9f7315dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.819267] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1869.819267] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e3e539-4536-4b8b-71d2-673c0bd0b89d" [ 1869.819267] env[62816]: _type = "Task" [ 1869.819267] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.827189] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e3e539-4536-4b8b-71d2-673c0bd0b89d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.922034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.940016] env[62816]: INFO nova.scheduler.client.report [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleted allocations for instance 543d69d2-0694-4d57-bbae-f8851ff0f0dc [ 1870.017280] env[62816]: DEBUG oslo_concurrency.lockutils [req-13d459f8-52fb-4780-a99f-893d3a3169e0 req-125345b7-c533-4f04-86ef-50b2fc8ce1da service nova] Releasing lock "refresh_cache-37cb03ea-2e94-4466-89c0-2e3f7fdac076" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.161985] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.329820] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e3e539-4536-4b8b-71d2-673c0bd0b89d, 'name': SearchDatastore_Task, 'duration_secs': 0.015864} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.331107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.331367] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 251b3ce3-06a4-40d4-ba18-a217650c9152/251b3ce3-06a4-40d4-ba18-a217650c9152.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1870.333893] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a93280a-09d9-4420-b97c-5fb1f1ec2919 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.341710] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1870.341710] env[62816]: value = "task-1789140" [ 1870.341710] env[62816]: _type = "Task" [ 1870.341710] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.350818] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789140, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.402881] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.404056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.404056] env[62816]: DEBUG nova.network.neutron [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1870.404056] env[62816]: DEBUG nova.objects.instance [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'info_cache' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1870.448662] env[62816]: DEBUG oslo_concurrency.lockutils [None req-61bc5170-800c-466f-be5a-13c8a40fd1af tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "543d69d2-0694-4d57-bbae-f8851ff0f0dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.220s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.559429] env[62816]: DEBUG nova.objects.instance [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1870.663262] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789139, 'name': ReconfigVM_Task, 'duration_secs': 1.082023} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.663611] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 6f0c72ab-1eaf-4db5-842f-b0ba75739e66/6f0c72ab-1eaf-4db5-842f-b0ba75739e66.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1870.664345] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c55caaf-bc1e-4cdb-a77a-35e7844ee403 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.672189] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1870.672189] env[62816]: value = "task-1789141" [ 1870.672189] env[62816]: _type = "Task" [ 1870.672189] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.685056] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789141, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.837300] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.837580] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.839743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "c66fa160-d4dd-429f-8751-f36cb2020ff1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.839743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.839743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.841882] env[62816]: INFO nova.compute.manager [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Terminating instance [ 1870.846982] env[62816]: DEBUG nova.compute.manager [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1870.847186] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1870.847979] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d733e9-7ab3-4de5-8b36-7fd7d2696694 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.856477] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468373} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.858718] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 251b3ce3-06a4-40d4-ba18-a217650c9152/251b3ce3-06a4-40d4-ba18-a217650c9152.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1870.859016] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1870.859296] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1870.859505] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27d6aae9-4c8d-44b0-9f1d-dc057a56fe57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.861182] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0eb4ccc4-40f3-4a1a-9ff0-ce29eec35293 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.867410] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1870.867410] env[62816]: value = "task-1789142" [ 1870.867410] env[62816]: _type = "Task" [ 1870.867410] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.871157] env[62816]: DEBUG oslo_vmware.api [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1870.871157] env[62816]: value = "task-1789143" [ 1870.871157] env[62816]: _type = "Task" [ 1870.871157] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.877130] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.883898] env[62816]: DEBUG oslo_vmware.api [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789143, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.065461] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ad9d2c35-59cf-4c08-9bed-1912be6e7b19 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.263s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.182185] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789141, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.380703] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117224} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.383616] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1871.383918] env[62816]: DEBUG oslo_vmware.api [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789143, 'name': PowerOffVM_Task, 'duration_secs': 0.22397} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.384612] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9951b9d7-1c7b-4dcb-96f6-1504c26bf7be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.386902] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1871.387124] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1871.387354] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd1cdabd-75fe-4bc3-93ec-70940923de19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.401746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.401960] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.412034] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 251b3ce3-06a4-40d4-ba18-a217650c9152/251b3ce3-06a4-40d4-ba18-a217650c9152.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1871.413881] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24c81eed-cfa2-4900-9840-5b97d7168fae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.435276] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1871.435276] env[62816]: value = "task-1789145" [ 1871.435276] env[62816]: _type = "Task" [ 1871.435276] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.444697] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789145, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.503088] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1871.503422] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1871.503680] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleting the datastore file [datastore1] c66fa160-d4dd-429f-8751-f36cb2020ff1 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.504026] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58732920-74a9-4401-918e-7f56807d6ffb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.512551] env[62816]: DEBUG oslo_vmware.api [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for the task: (returnval){ [ 1871.512551] env[62816]: value = "task-1789146" [ 1871.512551] env[62816]: _type = "Task" [ 1871.512551] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.521054] env[62816]: DEBUG oslo_vmware.api [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789146, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.682048] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789141, 'name': Rename_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.913705] env[62816]: INFO nova.compute.manager [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Detaching volume 8de9eb81-0f36-4069-9c8b-8bf875c75c98 [ 1871.932682] env[62816]: DEBUG nova.network.neutron [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.946850] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789145, 'name': ReconfigVM_Task, 'duration_secs': 0.295445} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.946850] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 251b3ce3-06a4-40d4-ba18-a217650c9152/251b3ce3-06a4-40d4-ba18-a217650c9152.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1871.947533] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6411150c-b75d-4dd4-b764-1a2669f7b166 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.950817] env[62816]: INFO nova.virt.block_device [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Attempting to driver detach volume 8de9eb81-0f36-4069-9c8b-8bf875c75c98 from mountpoint /dev/sdb [ 1871.951046] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1871.951238] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371177', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'name': 'volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'serial': '8de9eb81-0f36-4069-9c8b-8bf875c75c98'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1871.952677] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cddca7-c62a-41bd-9d38-1e7cfb0a692e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.957156] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1871.957156] env[62816]: value = "task-1789147" [ 1871.957156] env[62816]: _type = "Task" [ 1871.957156] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.979867] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee349ba-191a-4cec-8581-9721b40b368f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.985869] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789147, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.990442] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3455f639-7fb1-4f50-84ec-ed8bd8fa204a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.018301] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d614ea84-3679-4284-8312-736f5ed9b045 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.027369] env[62816]: DEBUG oslo_vmware.api [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Task: {'id': task-1789146, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167927} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.037789] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1872.038029] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1872.038231] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1872.038409] env[62816]: INFO nova.compute.manager [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1872.038673] env[62816]: DEBUG oslo.service.loopingcall [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.038916] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] The volume has not been displaced from its original location: [datastore1] volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98/volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98.vmdk. No consolidation needed. {{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1872.044297] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfiguring VM instance instance-00000047 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1872.044595] env[62816]: DEBUG nova.compute.manager [-] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1872.044697] env[62816]: DEBUG nova.network.neutron [-] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1872.046384] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0002015-c0b2-40cb-83e9-99f3a249b78e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.064581] env[62816]: DEBUG oslo_vmware.api [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1872.064581] env[62816]: value = "task-1789148" [ 1872.064581] env[62816]: _type = "Task" [ 1872.064581] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.072673] env[62816]: DEBUG oslo_vmware.api [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789148, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.182466] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789141, 'name': Rename_Task, 'duration_secs': 1.201913} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.182782] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1872.183064] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6a5380e-042c-4415-9a58-2f4c2a8507f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.188952] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1872.188952] env[62816]: value = "task-1789149" [ 1872.188952] env[62816]: _type = "Task" [ 1872.188952] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.201603] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.284428] env[62816]: DEBUG nova.compute.manager [req-d0f7bf3a-2bef-40d3-9881-bbcf05598678 req-5b079d80-4089-4756-904e-799b367f1289 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Received event network-vif-deleted-3b5a67df-8153-4be4-8afe-6bd3ae2b807b {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1872.284609] env[62816]: INFO nova.compute.manager [req-d0f7bf3a-2bef-40d3-9881-bbcf05598678 req-5b079d80-4089-4756-904e-799b367f1289 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Neutron deleted interface 3b5a67df-8153-4be4-8afe-6bd3ae2b807b; detaching it from the instance and deleting it from the info cache [ 1872.284801] env[62816]: DEBUG nova.network.neutron [req-d0f7bf3a-2bef-40d3-9881-bbcf05598678 req-5b079d80-4089-4756-904e-799b367f1289 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.435985] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.436343] env[62816]: DEBUG nova.objects.instance [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'migration_context' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1872.467049] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] 
Task: {'id': task-1789147, 'name': Rename_Task, 'duration_secs': 0.142525} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.467338] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1872.467588] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d66bf09-83cd-41e7-b470-60d660fd756f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.475149] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1872.475149] env[62816]: value = "task-1789150" [ 1872.475149] env[62816]: _type = "Task" [ 1872.475149] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.483124] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.574948] env[62816]: DEBUG oslo_vmware.api [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789148, 'name': ReconfigVM_Task, 'duration_secs': 0.48409} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.575258] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfigured VM instance instance-00000047 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1872.580185] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c4b10ec-fcce-4e62-8e44-14546177b95a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.595327] env[62816]: DEBUG oslo_vmware.api [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1872.595327] env[62816]: value = "task-1789151" [ 1872.595327] env[62816]: _type = "Task" [ 1872.595327] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.603745] env[62816]: DEBUG oslo_vmware.api [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789151, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.700166] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789149, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.763548] env[62816]: DEBUG nova.network.neutron [-] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.787851] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddafc292-624f-4031-8d2d-2742fa5db616 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.797503] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b02010-bea5-4e5c-b376-d95b9b825874 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.831135] env[62816]: DEBUG nova.compute.manager [req-d0f7bf3a-2bef-40d3-9881-bbcf05598678 req-5b079d80-4089-4756-904e-799b367f1289 service nova] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Detach interface failed, port_id=3b5a67df-8153-4be4-8afe-6bd3ae2b807b, reason: Instance c66fa160-d4dd-429f-8751-f36cb2020ff1 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1872.940211] env[62816]: DEBUG nova.objects.base [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Object Instance<9745413b-2bd8-45d7-8491-483e4921b59c> lazy-loaded attributes: info_cache,migration_context {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1872.942147] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9a756d-e144-49a1-9cf8-36302dd88783 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.974849] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9ced946-8453-4ddc-9a63-43e3b5002f2b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.987159] env[62816]: DEBUG oslo_vmware.api [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1872.987159] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ff29ce-43c1-5439-03b5-7cc041bccc26" [ 1872.987159] env[62816]: _type = "Task" [ 1872.987159] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1872.992581] env[62816]: DEBUG oslo_vmware.api [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789150, 'name': PowerOnVM_Task, 'duration_secs': 0.490048} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1872.997338] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1872.997710] env[62816]: INFO nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Took 7.43 seconds to spawn the instance on the hypervisor. [ 1872.998042] env[62816]: DEBUG nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1872.999227] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8567ccaa-d625-4864-9e7e-df703ae7ae58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.008475] env[62816]: DEBUG oslo_vmware.api [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ff29ce-43c1-5439-03b5-7cc041bccc26, 'name': SearchDatastore_Task, 'duration_secs': 0.008365} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.009998] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.010261] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.104992] env[62816]: DEBUG oslo_vmware.api [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789151, 'name': ReconfigVM_Task, 'duration_secs': 0.143382} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.105333] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371177', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'name': 'volume-8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '8de9eb81-0f36-4069-9c8b-8bf875c75c98', 'serial': '8de9eb81-0f36-4069-9c8b-8bf875c75c98'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1873.199776] env[62816]: DEBUG oslo_vmware.api [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789149, 'name': PowerOnVM_Task, 'duration_secs': 0.771978} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.199940] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1873.200154] env[62816]: INFO nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Took 9.90 seconds to spawn the instance on the hypervisor. [ 1873.200335] env[62816]: DEBUG nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1873.201093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc722b1-5ee6-473d-bec3-8a45461de8fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.267130] env[62816]: INFO nova.compute.manager [-] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Took 1.22 seconds to deallocate network for instance. [ 1873.525145] env[62816]: INFO nova.compute.manager [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Took 14.49 seconds to build instance. 
[ 1873.649218] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e722fb2b-8249-405f-a313-99c0cdadfbd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.655474] env[62816]: DEBUG nova.objects.instance [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1873.661736] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f728d7c1-c7c1-4bcf-af02-23fd8e98a9b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.692429] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0f02cd-fc1b-4188-83bb-4f454d70188d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.699774] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac071fcd-2836-4e92-8e05-f8b6fd63465e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.718436] env[62816]: DEBUG nova.compute.provider_tree [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1873.722203] env[62816]: INFO nova.compute.manager [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Took 19.01 seconds to build instance. 
[ 1873.773442] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.026531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-18eaa795-6b9b-43cc-a01f-e352637b6bc3 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.996s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.116860] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.117116] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.224806] env[62816]: DEBUG nova.scheduler.client.report [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1874.228688] env[62816]: DEBUG oslo_concurrency.lockutils [None req-905ad2a0-f162-4eb9-8d85-efb8c041b279 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.519s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.620381] env[62816]: DEBUG nova.compute.utils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1874.663130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0d7e721f-fc64-4091-8714-ed87511851fd tempest-AttachVolumeTestJSON-1059513280 
tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.261s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.788021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.788333] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.801883] env[62816]: DEBUG nova.compute.manager [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Received event network-changed-102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1874.802090] env[62816]: DEBUG nova.compute.manager [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Refreshing instance network info cache due to event network-changed-102f4b78-99cb-46f4-9305-2bec7ba02d1d. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1874.802300] env[62816]: DEBUG oslo_concurrency.lockutils [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] Acquiring lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.802445] env[62816]: DEBUG oslo_concurrency.lockutils [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] Acquired lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.802630] env[62816]: DEBUG nova.network.neutron [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Refreshing network info cache for port 102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1875.106656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.107011] env[62816]: DEBUG oslo_concurrency.lockutils [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.111106] env[62816]: DEBUG nova.compute.manager [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1875.111314] env[62816]: DEBUG nova.compute.manager [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing instance network info cache due to event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1875.111520] env[62816]: DEBUG oslo_concurrency.lockutils [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1875.111698] env[62816]: DEBUG oslo_concurrency.lockutils [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.111868] env[62816]: DEBUG nova.network.neutron [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1875.123741] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.236067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.226s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.236311] env[62816]: DEBUG nova.compute.manager [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=62816) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 1875.239073] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.466s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.239302] env[62816]: DEBUG nova.objects.instance [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lazy-loading 'resources' on Instance uuid c66fa160-d4dd-429f-8751-f36cb2020ff1 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.293078] env[62816]: INFO nova.compute.manager [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Detaching volume 3a5017ec-1e99-4460-aaa9-52093148a0df [ 1875.333994] env[62816]: INFO nova.virt.block_device [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Attempting to driver detach volume 3a5017ec-1e99-4460-aaa9-52093148a0df from mountpoint /dev/sdc [ 1875.334258] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1875.334455] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371181', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'name': 'volume-3a5017ec-1e99-4460-aaa9-52093148a0df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'serial': '3a5017ec-1e99-4460-aaa9-52093148a0df'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1875.335357] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67374ef-83d8-4d7a-8638-3c27462e100c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.362403] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd441bce-c5e0-4f66-a1ea-30264bc5bd28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.370662] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0904f66b-6a71-42a7-a4a2-1ed20664dfb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.392860] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce28b2d-34b3-485b-a8f3-8785516c91f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.409282] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] The volume has not been displaced from its original location: [datastore1] volume-3a5017ec-1e99-4460-aaa9-52093148a0df/volume-3a5017ec-1e99-4460-aaa9-52093148a0df.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1875.414950] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfiguring VM instance instance-00000047 to detach disk 2002 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1875.417752] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d79a708a-8897-4848-b557-cc63db0357ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.438493] env[62816]: DEBUG oslo_vmware.api [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1875.438493] env[62816]: value = "task-1789152" [ 1875.438493] env[62816]: _type = "Task" [ 1875.438493] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.446896] env[62816]: DEBUG oslo_vmware.api [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789152, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.600393] env[62816]: DEBUG nova.network.neutron [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updated VIF entry in instance network info cache for port 102f4b78-99cb-46f4-9305-2bec7ba02d1d. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1875.600393] env[62816]: DEBUG nova.network.neutron [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updating instance_info_cache with network_info: [{"id": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "address": "fa:16:3e:d4:14:13", "network": {"id": "11fc3824-17d9-4b7e-94af-cc82b2f6d1cc", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1883545396-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef0dee852154407fa3201a860c55bf3c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "32028d02-abaa-4071-bc65-1460f5c772a8", "external-id": "nsx-vlan-transportzone-558", "segmentation_id": 558, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap102f4b78-99", "ovs_interfaceid": "102f4b78-99cb-46f4-9305-2bec7ba02d1d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.616818] env[62816]: INFO nova.compute.manager [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Detaching volume fce3ab81-3139-4d04-bdd8-7bec1dfedee2 [ 1875.662022] env[62816]: INFO nova.virt.block_device [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Attempting to driver detach volume fce3ab81-3139-4d04-bdd8-7bec1dfedee2 from mountpoint /dev/sdb [ 1875.662022] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1875.662022] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371164', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'name': 'volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'f97ea34e-792e-4023-bd2f-549dba129925', 'attached_at': '', 'detached_at': '', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'serial': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1875.662022] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344f9f31-583d-4aca-9dd0-ac246ffc28c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.695383] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bee8ee-b4ab-4cae-9160-2f20d0c22cc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.708013] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3752ff5-4e66-4a7f-ab5b-42d7cbc01259 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.736847] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24936031-5008-4f14-ac0b-d3ca5627b5a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.760735] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] The volume has not been displaced from its original location: [datastore1] volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2/volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1875.766431] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfiguring VM instance instance-00000050 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1875.773872] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe3dd9f3-b64b-42a3-8b69-dcb51e24b37c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.794879] env[62816]: DEBUG oslo_vmware.api [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1875.794879] env[62816]: value = "task-1789153" [ 1875.794879] env[62816]: _type = "Task" [ 1875.794879] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.803705] env[62816]: DEBUG oslo_vmware.api [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789153, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.830153] env[62816]: INFO nova.scheduler.client.report [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted allocation for migration 9c3bf0c0-67f1-4b43-8a77-c2212e13f9e5 [ 1875.899828] env[62816]: DEBUG nova.network.neutron [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updated VIF entry in instance network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1875.900204] env[62816]: DEBUG nova.network.neutron [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.949061] env[62816]: DEBUG oslo_vmware.api [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789152, 'name': ReconfigVM_Task, 'duration_secs': 0.233412} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.949614] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Reconfigured VM instance instance-00000047 to detach disk 2002 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1875.955363] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73e4bda5-597f-487e-a64d-b55c7903e051 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.965878] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86ca2201-7735-4f08-bcf3-1efa62b7b7b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.973349] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6085966e-10d8-4dea-ae79-21c4e9981c25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.977594] env[62816]: DEBUG oslo_vmware.api [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1875.977594] env[62816]: value = "task-1789154" [ 1875.977594] env[62816]: _type = "Task" [ 1875.977594] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.010451] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd301b3-325d-453f-a675-b6096f4be1bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.014337] env[62816]: DEBUG oslo_vmware.api [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789154, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.019506] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576dcc65-72af-4a53-9e4f-6b03ec64b3e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.035567] env[62816]: DEBUG nova.compute.provider_tree [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1876.105021] env[62816]: DEBUG oslo_concurrency.lockutils [req-191b0008-9590-488d-ae52-df693f731b03 req-f67f4f89-73cc-4248-b8dd-fffba4bc571a service nova] Releasing lock "refresh_cache-6f0c72ab-1eaf-4db5-842f-b0ba75739e66" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.203341] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.203627] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.204404] env[62816]: INFO nova.compute.manager [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Attaching volume c253876c-91e9-4c8f-b674-798739e9b116 to /dev/sdb [ 1876.236909] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b171a0-2739-4855-ae05-45af79efa2f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.246197] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea3429a-b60a-4ca4-9f04-03d0ff5c95ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.259220] env[62816]: DEBUG nova.virt.block_device [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating existing volume attachment record: f9138bdc-ea99-4505-a8f7-5dc9eaaabeb6 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1876.305139] env[62816]: DEBUG oslo_vmware.api [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789153, 'name': ReconfigVM_Task, 'duration_secs': 0.267897} 
completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.306651] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Reconfigured VM instance instance-00000050 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1876.313381] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31abe97a-02a5-47a3-9fb7-8bacfffdf06f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.327223] env[62816]: DEBUG nova.objects.instance [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'flavor' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.333712] env[62816]: DEBUG oslo_vmware.api [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1876.333712] env[62816]: value = "task-1789155" [ 1876.333712] env[62816]: _type = "Task" [ 1876.333712] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.337639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d43b78ef-ec96-4cfc-bf42-df5fef9cd9c8 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.554s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.343889] env[62816]: DEBUG oslo_vmware.api [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789155, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.402683] env[62816]: DEBUG oslo_concurrency.lockutils [req-bd9a9288-f76f-4d3c-bc11-f4e326834cbe req-e55b5d80-c408-4779-96d5-8b470435d87c service nova] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.488332] env[62816]: DEBUG oslo_vmware.api [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789154, 'name': ReconfigVM_Task, 'duration_secs': 0.139495} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.488682] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371181', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'name': 'volume-3a5017ec-1e99-4460-aaa9-52093148a0df', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'd03ed540-5c20-4bcb-ac7e-eec8c09e4451', 'attached_at': '', 'detached_at': '', 'volume_id': '3a5017ec-1e99-4460-aaa9-52093148a0df', 'serial': '3a5017ec-1e99-4460-aaa9-52093148a0df'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1876.539417] env[62816]: DEBUG nova.scheduler.client.report [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1876.833815] env[62816]: DEBUG nova.compute.manager [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1876.834116] env[62816]: DEBUG nova.compute.manager [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing instance network info cache due to event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1876.834303] env[62816]: DEBUG oslo_concurrency.lockutils [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.834459] env[62816]: DEBUG oslo_concurrency.lockutils [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.834639] env[62816]: DEBUG nova.network.neutron [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1876.836418] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.836418] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.836418] env[62816]: DEBUG nova.network.neutron [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1876.836418] env[62816]: DEBUG nova.objects.instance [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'info_cache' on Instance uuid 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1876.848387] env[62816]: DEBUG oslo_vmware.api [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789155, 'name': ReconfigVM_Task, 'duration_secs': 0.153036} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.848504] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371164', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'name': 'volume-fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': 'f97ea34e-792e-4023-bd2f-549dba129925', 'attached_at': '', 'detached_at': '', 'volume_id': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2', 'serial': 'fce3ab81-3139-4d04-bdd8-7bec1dfedee2'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1877.028560] env[62816]: DEBUG nova.objects.instance [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'flavor' on Instance uuid d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1877.044563] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.805s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.065189] env[62816]: INFO nova.scheduler.client.report [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Deleted allocations for instance c66fa160-d4dd-429f-8751-f36cb2020ff1 [ 1877.136570] env[62816]: DEBUG nova.compute.manager [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.136622] env[62816]: DEBUG nova.compute.manager [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing instance network info cache due to event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1877.136858] env[62816]: DEBUG oslo_concurrency.lockutils [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.344229] env[62816]: DEBUG nova.objects.base [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Object Instance<9745413b-2bd8-45d7-8491-483e4921b59c> lazy-loaded attributes: flavor,info_cache {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1877.573130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0ad21e5b-5185-48c4-85ed-fc505ea4af48 tempest-ServerRescueNegativeTestJSON-1008492908 tempest-ServerRescueNegativeTestJSON-1008492908-project-member] Lock "c66fa160-d4dd-429f-8751-f36cb2020ff1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.735s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.593276] env[62816]: DEBUG nova.objects.instance [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'flavor' on Instance uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1877.611389] env[62816]: DEBUG nova.network.neutron [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updated VIF entry in instance network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1877.611597] env[62816]: DEBUG nova.network.neutron [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.035370] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e8e0ad9c-2052-47bc-ba4b-6254d08a1208 tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.247s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.055738] env[62816]: DEBUG nova.network.neutron [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [{"id": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "address": "fa:16:3e:f9:78:e0", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc924d6c0-d5", "ovs_interfaceid": "c924d6c0-d5cc-40a9-b561-9393a5f71201", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": 
{}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.114321] env[62816]: DEBUG oslo_concurrency.lockutils [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.114569] env[62816]: DEBUG nova.compute.manager [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1878.114737] env[62816]: DEBUG nova.compute.manager [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing instance network info cache due to event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1878.114943] env[62816]: DEBUG oslo_concurrency.lockutils [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.115096] env[62816]: DEBUG oslo_concurrency.lockutils [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.115258] env[62816]: DEBUG nova.network.neutron [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1878.116222] env[62816]: DEBUG oslo_concurrency.lockutils [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.116394] env[62816]: DEBUG nova.network.neutron [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1878.359143] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.359241] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 
tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.359384] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.359583] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.359769] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.362749] env[62816]: INFO nova.compute.manager [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Terminating instance [ 1878.365966] env[62816]: DEBUG nova.compute.manager [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1878.366190] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1878.367038] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da24fce-6933-400e-9ba2-974a31ae70ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.375589] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1878.375855] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-114d1e47-21ab-41b7-996d-5f8dda9c83b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.383032] env[62816]: DEBUG oslo_vmware.api [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1878.383032] env[62816]: value = "task-1789159" [ 1878.383032] env[62816]: _type = "Task" [ 1878.383032] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.392715] env[62816]: DEBUG oslo_vmware.api [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.559245] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-9745413b-2bd8-45d7-8491-483e4921b59c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.600223] env[62816]: DEBUG oslo_concurrency.lockutils [None req-26e330a6-f316-410f-b082-525c3f6ae61a tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.493s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.850729] env[62816]: DEBUG nova.network.neutron [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updated VIF entry in instance network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.851101] env[62816]: DEBUG nova.network.neutron [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.897824] env[62816]: DEBUG oslo_vmware.api [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789159, 'name': PowerOffVM_Task, 'duration_secs': 0.225281} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.898268] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1878.898584] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1878.898951] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01922cef-d138-4779-930c-f9bc453450b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.937717] env[62816]: DEBUG nova.network.neutron [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updated VIF entry in instance network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.937717] env[62816]: DEBUG nova.network.neutron [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.980021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1878.980021] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.980021] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Deleting the datastore file [datastore1] d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1878.980021] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7a444dd-c935-49e7-bff7-94f6b799ff40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.988020] env[62816]: DEBUG oslo_vmware.api [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for the task: (returnval){ [ 1878.988020] env[62816]: value = "task-1789162" [ 1878.988020] env[62816]: _type = "Task" [ 1878.988020] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.994729] env[62816]: DEBUG oslo_vmware.api [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789162, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.065021] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1879.065021] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a406feb9-7d39-4a5b-915d-343f73992358 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.077024] env[62816]: DEBUG oslo_vmware.api [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1879.077024] env[62816]: value = "task-1789163" [ 1879.077024] env[62816]: _type = "Task" [ 1879.077024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.088415] env[62816]: DEBUG oslo_vmware.api [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789163, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.354100] env[62816]: DEBUG oslo_concurrency.lockutils [req-6fca9734-aa91-4600-aebf-39fe8474962e req-e2ec0528-2e6d-402e-805e-9dab8e918e77 service nova] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.385063] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.385401] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.385674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "f97ea34e-792e-4023-bd2f-549dba129925-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.385910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1879.386129] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.388525] env[62816]: INFO nova.compute.manager [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Terminating instance [ 1879.390469] env[62816]: DEBUG nova.compute.manager [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1879.390672] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1879.391510] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a513ab1a-70d8-4fdf-ac79-fae1f708479b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.399099] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1879.399330] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c927a77e-3ef0-46a7-a50b-92ee78235b16 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.405331] env[62816]: DEBUG oslo_vmware.api [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1879.405331] env[62816]: value = "task-1789164" [ 1879.405331] env[62816]: _type = "Task" [ 1879.405331] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.413774] env[62816]: DEBUG oslo_vmware.api [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789164, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.439723] env[62816]: DEBUG oslo_concurrency.lockutils [req-26565a57-879a-4385-b6d6-9b5efdd19ced req-38c33a9d-ce9b-425d-8fb7-6fc976fc1df3 service nova] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.496436] env[62816]: DEBUG oslo_vmware.api [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Task: {'id': task-1789162, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138935} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.496745] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1879.496942] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1879.497160] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1879.497342] env[62816]: INFO nova.compute.manager [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1879.497612] env[62816]: DEBUG oslo.service.loopingcall [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.497842] env[62816]: DEBUG nova.compute.manager [-] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1879.497947] env[62816]: DEBUG nova.network.neutron [-] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1879.584204] env[62816]: DEBUG oslo_vmware.api [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789163, 'name': PowerOnVM_Task, 'duration_secs': 0.448005} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.584502] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1879.584723] env[62816]: DEBUG nova.compute.manager [None req-f346b71b-cb49-4d80-b084-030ad111a6a0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1879.585947] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e40ab50-180a-4372-a541-cbc95a6974df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.917243] env[62816]: DEBUG oslo_vmware.api [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789164, 'name': PowerOffVM_Task, 'duration_secs': 0.285502} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.917539] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1879.917712] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1879.918008] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d857b15c-5a1c-4d79-b6e4-99d2a7dd2ae8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.004456] env[62816]: DEBUG nova.compute.manager [req-befed68c-9900-4844-81e8-074033325b4c req-4956309e-f22c-4b2f-a112-9fb62c9c74ce service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Received event network-vif-deleted-2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1880.004685] env[62816]: INFO nova.compute.manager [req-befed68c-9900-4844-81e8-074033325b4c req-4956309e-f22c-4b2f-a112-9fb62c9c74ce service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Neutron deleted interface 2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1; detaching it from the instance and deleting it from the info cache [ 1880.004857] env[62816]: DEBUG nova.network.neutron [req-befed68c-9900-4844-81e8-074033325b4c req-4956309e-f22c-4b2f-a112-9fb62c9c74ce service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.019531] env[62816]: 
DEBUG nova.virt.vmwareapi.vmops [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1880.019767] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1880.019953] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleting the datastore file [datastore1] f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1880.020450] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e4313918-c085-4238-ad4b-2c5a54b8d4a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.026661] env[62816]: DEBUG oslo_vmware.api [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1880.026661] env[62816]: value = "task-1789166" [ 1880.026661] env[62816]: _type = "Task" [ 1880.026661] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.034710] env[62816]: DEBUG oslo_vmware.api [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.482156] env[62816]: DEBUG nova.network.neutron [-] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1880.495901] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.496178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.496382] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.496560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.496734] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.498690] env[62816]: INFO nova.compute.manager [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Terminating instance [ 1880.500439] env[62816]: DEBUG nova.compute.manager [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1880.500638] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1880.501471] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f4640dc-3529-4d8d-97df-f5e66a367c64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.509842] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1880.510097] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c24d19d4-c626-471a-bb63-9cf6e3525d08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.511906] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c080d1a1-288f-472d-a75b-bf1b2d65363e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.520699] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50211796-63b0-4ad1-b13e-859d16ad0311 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.533275] env[62816]: DEBUG oslo_vmware.api [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1880.533275] env[62816]: value = "task-1789167" [ 1880.533275] env[62816]: _type = "Task" [ 1880.533275] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.545803] env[62816]: DEBUG oslo_vmware.api [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.429992} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.548837] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1880.549046] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1880.549233] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1880.549409] env[62816]: INFO nova.compute.manager [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1880.549650] env[62816]: DEBUG oslo.service.loopingcall [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1880.549877] env[62816]: DEBUG oslo_vmware.api [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789167, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.559196] env[62816]: DEBUG nova.compute.manager [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1880.559342] env[62816]: DEBUG nova.network.neutron [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1880.561327] env[62816]: DEBUG nova.compute.manager [req-befed68c-9900-4844-81e8-074033325b4c req-4956309e-f22c-4b2f-a112-9fb62c9c74ce service nova] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Detach interface failed, port_id=2cd3caf0-0f18-4ee2-84a7-9a8bf4441dc1, reason: Instance d03ed540-5c20-4bcb-ac7e-eec8c09e4451 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1880.809442] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1880.809770] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1880.810632] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c92b5f-5d61-4da0-9432-82ff2ff6aa52 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.828110] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962347ae-431c-4c6d-9680-f1e70a29ab50 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.853134] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] volume-c253876c-91e9-4c8f-b674-798739e9b116/volume-c253876c-91e9-4c8f-b674-798739e9b116.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.853469] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-004220d0-78ca-4e4e-8087-556156c0cb75 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.872728] env[62816]: DEBUG oslo_vmware.api [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1880.872728] env[62816]: value = "task-1789168" [ 1880.872728] env[62816]: _type = "Task" [ 1880.872728] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.879888] env[62816]: DEBUG oslo_vmware.api [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789168, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.984831] env[62816]: INFO nova.compute.manager [-] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Took 1.49 seconds to deallocate network for instance. 
[ 1881.047353] env[62816]: DEBUG oslo_vmware.api [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789167, 'name': PowerOffVM_Task, 'duration_secs': 0.26849} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.047637] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1881.047811] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1881.048082] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-28ad3049-cef3-49a1-a59a-9e0c77f16b7d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.179539] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1881.179807] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1881.180033] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleting the datastore file [datastore1] 9745413b-2bd8-45d7-8491-483e4921b59c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1881.180513] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c4a1079-7d24-425b-99d0-464ea8e47f82 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.187365] env[62816]: DEBUG oslo_vmware.api [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1881.187365] env[62816]: value = "task-1789170" [ 1881.187365] env[62816]: _type = "Task" [ 1881.187365] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.199303] env[62816]: DEBUG oslo_vmware.api [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789170, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.382841] env[62816]: DEBUG oslo_vmware.api [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789168, 'name': ReconfigVM_Task, 'duration_secs': 0.376169} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.383248] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfigured VM instance instance-0000005c to attach disk [datastore1] volume-c253876c-91e9-4c8f-b674-798739e9b116/volume-c253876c-91e9-4c8f-b674-798739e9b116.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1881.388135] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8514bc8-fa0c-4746-90ca-507cba52e63b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.403142] env[62816]: DEBUG oslo_vmware.api [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1881.403142] env[62816]: value = "task-1789171" [ 1881.403142] env[62816]: _type = "Task" [ 1881.403142] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.410781] env[62816]: DEBUG oslo_vmware.api [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789171, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.492252] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.492584] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.492767] env[62816]: DEBUG nova.objects.instance [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lazy-loading 'resources' on Instance uuid d03ed540-5c20-4bcb-ac7e-eec8c09e4451 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.497845] env[62816]: DEBUG nova.network.neutron [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.696528] env[62816]: DEBUG oslo_vmware.api [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140647} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.699041] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1881.699041] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1881.699041] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1881.699041] env[62816]: INFO nova.compute.manager [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 1881.699041] env[62816]: DEBUG oslo.service.loopingcall [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.699041] env[62816]: DEBUG nova.compute.manager [-] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1881.699041] env[62816]: DEBUG nova.network.neutron [-] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1881.918522] env[62816]: DEBUG oslo_vmware.api [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789171, 'name': ReconfigVM_Task, 'duration_secs': 0.148068} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.918522] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1882.000160] env[62816]: INFO nova.compute.manager [-] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Took 1.44 seconds to deallocate network for instance. 
[ 1882.038357] env[62816]: DEBUG nova.compute.manager [req-666cd355-84af-4b57-92af-77763a0da45f req-f218aee1-4f9c-4c76-8586-6912e11ea700 service nova] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Received event network-vif-deleted-d0353b95-1d3d-4eab-9c03-374679fe2118 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1882.127607] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99370868-6b18-45cc-bf29-0f39ce0424af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.136856] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315c87f9-6d67-4f72-b1bd-efda5c19017c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.169308] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb5f168-3aeb-4653-a0a8-2a9c83484fd4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.177341] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7806ecd-a05a-452e-bdd3-ada3886a13cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.190763] env[62816]: DEBUG nova.compute.provider_tree [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1882.507561] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.619036] env[62816]: DEBUG nova.network.neutron [-] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.695116] env[62816]: DEBUG nova.scheduler.client.report [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1882.955431] env[62816]: DEBUG nova.objects.instance [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'flavor' on Instance uuid e26b6593-7e64-4a43-b09d-92d2e668c25b {{(pid=62816) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.121595] env[62816]: INFO nova.compute.manager [-] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Took 1.42 seconds to deallocate network for instance. [ 1883.199832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.707s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.202213] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.695s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.202449] env[62816]: DEBUG nova.objects.instance [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'resources' on Instance uuid f97ea34e-792e-4023-bd2f-549dba129925 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1883.221756] env[62816]: INFO nova.scheduler.client.report [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Deleted allocations for instance d03ed540-5c20-4bcb-ac7e-eec8c09e4451 [ 1883.459936] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cb501b1f-a53e-4050-9983-41605c48e677 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.256s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.628275] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.734017] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0113bfb1-ba76-462a-a96b-386c3d9ae9eb tempest-AttachVolumeTestJSON-1059513280 tempest-AttachVolumeTestJSON-1059513280-project-member] Lock "d03ed540-5c20-4bcb-ac7e-eec8c09e4451" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.375s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.839534] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80fb314-866a-4e7f-bff8-450895e6a384 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.846941] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae596d9-e82a-4f0c-b004-6c30f2a469b2 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.878671] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bad488-d61c-48d5-ad59-a9d21db79d3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.885761] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794bcb9c-7a18-4bea-80ee-3e47a5a3bf3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.898593] env[62816]: DEBUG nova.compute.provider_tree [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.064482] env[62816]: DEBUG nova.compute.manager [req-a22c0b53-0324-49cd-8b15-71b33f6e54a9 req-2630bd55-61d5-4586-acdb-bcc812dd7201 service nova] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Received event network-vif-deleted-c924d6c0-d5cc-40a9-b561-9393a5f71201 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1884.239320] env[62816]: INFO nova.compute.manager [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Rebuilding instance [ 1884.278160] env[62816]: DEBUG nova.compute.manager [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1884.279102] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23db16a-476e-44e5-872c-066e7317739d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.310276] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1884.310840] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1884.311064] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1884.401641] env[62816]: DEBUG nova.scheduler.client.report [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1884.790789] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1884.791224] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-648b4e87-4014-428c-8456-280dd7dbe91d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.799319] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1884.799319] env[62816]: value = "task-1789173" [ 1884.799319] env[62816]: _type = "Task" [ 1884.799319] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.809184] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789173, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.907201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.705s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.909835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.282s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1884.910365] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.934574] env[62816]: INFO nova.scheduler.client.report [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted allocations for instance 9745413b-2bd8-45d7-8491-483e4921b59c [ 1884.938013] env[62816]: INFO nova.scheduler.client.report [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 
tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted allocations for instance f97ea34e-792e-4023-bd2f-549dba129925 [ 1885.310015] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789173, 'name': PowerOffVM_Task, 'duration_secs': 0.217337} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.310308] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1885.359389] env[62816]: INFO nova.compute.manager [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Detaching volume c253876c-91e9-4c8f-b674-798739e9b116 [ 1885.389179] env[62816]: INFO nova.virt.block_device [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Attempting to driver detach volume c253876c-91e9-4c8f-b674-798739e9b116 from mountpoint /dev/sdb [ 1885.389433] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1885.389626] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1885.390529] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d15ec2-ceb4-4074-8e4e-8bcc19f221ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.413255] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25df83a0-291e-45db-b39d-4b3b85bc724f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.419668] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0565987f-8052-495f-8648-45f97017ffd7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.439522] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c412b6d-8fe7-47fe-a6a5-7de177050b9a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.447118] env[62816]: DEBUG oslo_concurrency.lockutils [None req-67bae6c9-8665-4d41-8e08-76da7b4fdd9e tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "9745413b-2bd8-45d7-8491-483e4921b59c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.951s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.458412] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] The volume has not been displaced from its original location: [datastore1] volume-c253876c-91e9-4c8f-b674-798739e9b116/volume-c253876c-91e9-4c8f-b674-798739e9b116.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1885.464296] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1885.464858] env[62816]: DEBUG oslo_concurrency.lockutils [None req-72065cce-ecb4-401a-a534-a982b3e63488 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "f97ea34e-792e-4023-bd2f-549dba129925" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.079s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.465807] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-371d3471-5cd2-48d8-ae15-ac3df2789591 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.486174] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1885.486174] env[62816]: value = "task-1789174" [ 1885.486174] env[62816]: _type = "Task" [ 1885.486174] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.494696] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789174, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.998954] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789174, 'name': ReconfigVM_Task, 'duration_secs': 0.200777} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.999269] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1886.004891] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-432e9bbe-33b1-4813-a2e0-fb4bc42027c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.020499] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1886.020499] env[62816]: value = "task-1789175" [ 1886.020499] env[62816]: _type = "Task" [ 1886.020499] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.028470] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789175, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.530174] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789175, 'name': ReconfigVM_Task, 'duration_secs': 0.146602} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.530523] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1886.853669] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.853892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.854113] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1887.282432] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.282830] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1887.727767] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1887.728100] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b89dac02-5e28-46f9-b618-b93fe395ff6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.735914] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1887.735914] env[62816]: value = "task-1789177" [ 1887.735914] env[62816]: _type = "Task" [ 1887.735914] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.743925] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.784910] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1888.128975] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [{"id": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "address": "fa:16:3e:de:ed:af", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c7e341-ff", "ovs_interfaceid": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1888.216674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 
tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.216915] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.246902] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1888.247167] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1888.247393] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1888.248337] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794f164c-c851-4ae9-a9a8-11815ba15af7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.269730] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19e6414-09a7-442a-a022-f6d683a7a4d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.277297] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1888.277632] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] 
Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1888.278538] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783f787d-c1b3-4461-b659-e4a21990688c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.285642] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1888.285898] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c2680eb-d1bb-4473-bc74-9a515912bd57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.303279] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1888.303517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.305104] env[62816]: INFO nova.compute.claims [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1888.408834] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1888.409093] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1888.409285] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1888.409554] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-072bc271-fbca-4868-a32e-119bc24d3791 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1888.415627] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1888.415627] env[62816]: value = "task-1789179" [ 1888.415627] env[62816]: _type = "Task" [ 1888.415627] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.423084] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789179, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.632264] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.632480] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1888.632697] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.632862] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.633023] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.633183] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.633329] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.633476] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.633605] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1888.633745] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1888.719351] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1888.925424] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789179, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177109} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.925682] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1888.925869] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1888.926060] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1889.137085] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.237364] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.414359] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b70e220-a968-4a30-a439-f21481997168 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.421894] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb862718-0ead-4b82-b6ca-172f42f36924 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.453675] env[62816]: INFO 
nova.virt.block_device [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Booting with volume c253876c-91e9-4c8f-b674-798739e9b116 at /dev/sdb [ 1889.456209] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1750271c-c941-4b1a-a80b-9e81e682160e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.465917] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc61f52d-1429-452a-bf33-208b78946e74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.480178] env[62816]: DEBUG nova.compute.provider_tree [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1889.483313] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1fb1dce-1556-4b01-9cde-80e5bda67fb8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.491580] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-915ef904-c783-4dfb-aa84-ab8a0d8cbd13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.517169] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e239d5ab-becc-485f-8621-f0301bb0a95f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.524543] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca7d5acf-3cde-4460-b6d3-da624315d4ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.550547] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197281b8-5ef4-4378-a7f3-fc8ef955a8a4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.556204] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260e2fac-0d47-4560-a1e3-bf5808e5cb6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.568469] env[62816]: DEBUG nova.virt.block_device [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating existing volume attachment record: a6c1cbe3-6734-4930-8740-ec78c0040f9c {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1889.986677] env[62816]: DEBUG nova.scheduler.client.report [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1890.492237] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.188s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.492973] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1890.496256] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.359s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.496512] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.496709] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1890.497099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.260s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.498806] env[62816]: INFO nova.compute.claims [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1890.502802] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0107690-0cd4-4e76-9152-886270659be7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.512760] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78efed71-3d84-4691-aa29-19c032c15ec2 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.527132] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd651435-4053-4362-ba68-724ece2f6758 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.533918] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c852b6c3-89fd-48e5-8723-48979d9d5aa6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.563809] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179853MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1890.563952] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.571125] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-55ff7fac-dd03-49d7-b99b-70da6123e691" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.571313] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-55ff7fac-dd03-49d7-b99b-70da6123e691" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.571655] env[62816]: DEBUG nova.objects.instance [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'flavor' on Instance uuid aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1891.004541] env[62816]: DEBUG nova.compute.utils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1891.005586] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1891.005825] env[62816]: DEBUG nova.network.neutron [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1891.056037] env[62816]: DEBUG nova.policy [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a59c608ab954a3ba9cd61a84f30b89f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c54ea5a5abf4f0298b76f6081de8e60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1891.330390] env[62816]: DEBUG nova.network.neutron [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Successfully created port: 7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1891.398838] env[62816]: DEBUG nova.objects.instance [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'pci_requests' on Instance uuid aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1891.508646] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1891.632956] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582b2a44-6732-4e0c-a1fa-693cf1edf6ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.640615] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f7ee3b-5c51-4049-8f77-63975f70c7c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.684054] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7934ddd-4eaa-4cfc-ae2a-126a6eb8e1f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.692292] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1891.692545] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1891.692705] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1891.692888] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1891.693073] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1891.693264] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1891.693481] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1891.693645] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1891.693815] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1891.693980] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1891.694179] env[62816]: DEBUG nova.virt.hardware [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1891.695392] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cebf90-768c-4214-a354-10b56e719e9f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.699561] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea0ae8b-a992-42a9-95eb-c057a5deb0b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.713624] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69c6248c-8825-40bf-aa38-f483d391c3b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.717342] env[62816]: DEBUG nova.compute.provider_tree [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.728894] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:53:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'464c5ce0-30b5-473d-910e-343ba514ffa7', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1891.736146] env[62816]: DEBUG oslo.service.loopingcall [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.736902] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1891.737131] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87a176c1-63f4-4777-8598-abd04d7e25f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.757020] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1891.757020] env[62816]: value = "task-1789181" [ 1891.757020] env[62816]: _type = "Task" [ 1891.757020] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.764973] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789181, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.902094] env[62816]: DEBUG nova.objects.base [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1891.902094] env[62816]: DEBUG nova.network.neutron [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1891.973453] env[62816]: DEBUG nova.policy [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1892.220530] env[62816]: DEBUG nova.scheduler.client.report [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1892.267425] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789181, 'name': CreateVM_Task, 'duration_secs': 0.309936} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.267425] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1892.267940] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.268144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.268672] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1892.269259] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a677cf-f14f-4c50-ae34-acc668facb55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.274302] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1892.274302] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525ab9ac-7830-0c43-586d-de5745453ab3" [ 1892.274302] env[62816]: _type = "Task" [ 1892.274302] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.281978] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525ab9ac-7830-0c43-586d-de5745453ab3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.517917] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1892.542168] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1892.542439] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1892.542598] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1892.542779] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1892.542923] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1892.543121] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1892.543350] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1892.543511] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1892.543677] env[62816]: DEBUG 
nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1892.543838] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1892.544022] env[62816]: DEBUG nova.virt.hardware [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1892.544923] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df045a95-53e2-4d8e-b14b-2ae3392e1f82 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.553998] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8583730e-493d-4853-bbd6-4e01ce2ebeb1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.728018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.728018] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1892.729429] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.165s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.772197] env[62816]: DEBUG nova.compute.manager [req-fb2af70c-bb71-4946-bdfb-4b336765a65a req-8fe97cb9-08de-447b-b638-6fffb4d51530 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Received event network-vif-plugged-7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1892.772197] env[62816]: DEBUG oslo_concurrency.lockutils [req-fb2af70c-bb71-4946-bdfb-4b336765a65a req-8fe97cb9-08de-447b-b638-6fffb4d51530 service nova] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.772197] env[62816]: DEBUG oslo_concurrency.lockutils [req-fb2af70c-bb71-4946-bdfb-4b336765a65a req-8fe97cb9-08de-447b-b638-6fffb4d51530 service nova] Lock "341bf195-e528-4e3b-8636-fac7a383d228-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.772197] env[62816]: DEBUG oslo_concurrency.lockutils [req-fb2af70c-bb71-4946-bdfb-4b336765a65a req-8fe97cb9-08de-447b-b638-6fffb4d51530 service nova] Lock "341bf195-e528-4e3b-8636-fac7a383d228-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.772197] env[62816]: DEBUG nova.compute.manager [req-fb2af70c-bb71-4946-bdfb-4b336765a65a req-8fe97cb9-08de-447b-b638-6fffb4d51530 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] No waiting events found dispatching network-vif-plugged-7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1892.772709] env[62816]: WARNING nova.compute.manager [req-fb2af70c-bb71-4946-bdfb-4b336765a65a req-8fe97cb9-08de-447b-b638-6fffb4d51530 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Received unexpected event network-vif-plugged-7731c29e-449a-4c40-bb70-5a2c88561abe for instance with vm_state building and task_state spawning. [ 1892.786222] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525ab9ac-7830-0c43-586d-de5745453ab3, 'name': SearchDatastore_Task, 'duration_secs': 0.00902} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.787202] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.787202] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1892.787202] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.787202] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.787382] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1892.787662] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9713d45-e211-44b1-9518-22f8c73e36d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.796178] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1892.796371] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1892.797077] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3e04dca-e4da-48c4-874d-4afc8fffb06f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.802935] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1892.802935] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a672a2-b131-583b-7ae6-089f349dc815" [ 1892.802935] env[62816]: _type = "Task" [ 1892.802935] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.810730] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a672a2-b131-583b-7ae6-089f349dc815, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.825337] env[62816]: DEBUG nova.network.neutron [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Successfully updated port: 7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1893.233033] env[62816]: DEBUG nova.compute.utils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.234479] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1893.234659] env[62816]: DEBUG nova.network.neutron [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1893.283285] env[62816]: DEBUG nova.policy [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bfce7acecae4c45b59ae478da8c6a67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '138797faa4144ecbad6956e126963199', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1893.313142] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a672a2-b131-583b-7ae6-089f349dc815, 'name': SearchDatastore_Task, 'duration_secs': 0.008731} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.315704] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21ebd064-052d-41d1-859f-e97a8a0cad2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.321634] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1893.321634] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522e0815-3c64-5fa5-33a7-f062897d99b0" [ 1893.321634] env[62816]: _type = "Task" [ 1893.321634] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.329519] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.329599] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.329720] env[62816]: DEBUG nova.network.neutron [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1893.330816] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522e0815-3c64-5fa5-33a7-f062897d99b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.374721] env[62816]: DEBUG nova.compute.manager [req-23010970-1e1d-4d53-9aa5-e08e23ea983a req-98b8ed60-ce58-4371-b764-d26956458b3e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-vif-plugged-55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1893.374945] env[62816]: DEBUG oslo_concurrency.lockutils [req-23010970-1e1d-4d53-9aa5-e08e23ea983a req-98b8ed60-ce58-4371-b764-d26956458b3e service nova] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.375223] env[62816]: DEBUG oslo_concurrency.lockutils [req-23010970-1e1d-4d53-9aa5-e08e23ea983a req-98b8ed60-ce58-4371-b764-d26956458b3e service nova] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.375406] env[62816]: DEBUG oslo_concurrency.lockutils [req-23010970-1e1d-4d53-9aa5-e08e23ea983a req-98b8ed60-ce58-4371-b764-d26956458b3e service nova] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.375576] env[62816]: DEBUG nova.compute.manager [req-23010970-1e1d-4d53-9aa5-e08e23ea983a req-98b8ed60-ce58-4371-b764-d26956458b3e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] No waiting events found dispatching network-vif-plugged-55ff7fac-dd03-49d7-b99b-70da6123e691 
{{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1893.375740] env[62816]: WARNING nova.compute.manager [req-23010970-1e1d-4d53-9aa5-e08e23ea983a req-98b8ed60-ce58-4371-b764-d26956458b3e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received unexpected event network-vif-plugged-55ff7fac-dd03-49d7-b99b-70da6123e691 for instance with vm_state active and task_state None. [ 1893.457242] env[62816]: DEBUG nova.network.neutron [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Successfully updated port: 55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1893.553512] env[62816]: DEBUG nova.network.neutron [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Successfully created port: 850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1893.741699] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4ab07a21-2685-42bc-af13-b95473993d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance e26b6593-7e64-4a43-b09d-92d2e668c25b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 37cb03ea-2e94-4466-89c0-2e3f7fdac076 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 251b3ce3-06a4-40d4-ba18-a217650c9152 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 341bf195-e528-4e3b-8636-fac7a383d228 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5b87e09d-ae08-4936-8479-c845e25b31b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1893.773452] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1893.835652] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522e0815-3c64-5fa5-33a7-f062897d99b0, 'name': SearchDatastore_Task, 'duration_secs': 0.009112} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.838056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.838371] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1893.839093] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0067164-78f9-4c66-ae2f-b836daade4d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.845353] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1893.845353] env[62816]: value = "task-1789182" [ 1893.845353] env[62816]: _type = "Task" [ 1893.845353] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.855302] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789182, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.885237] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeee0ce5-765f-4484-aad2-1cb8974b751f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.893026] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a7c928-6c53-4cf1-84fe-fcad0e579878 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.924147] env[62816]: DEBUG nova.network.neutron [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1893.929029] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35adf8e-b657-4154-b3ae-88df8de9b19f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.933654] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-becf93e0-79fc-4032-a9ed-0fd52fd3ab8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.951349] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1893.962770] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.962987] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.963301] env[62816]: DEBUG nova.network.neutron [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1894.177175] env[62816]: DEBUG nova.network.neutron [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] 
{{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.355665] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789182, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44794} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.356146] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1894.356278] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1894.356428] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4148b535-ee40-410f-97c3-7e491bd5b67a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.363658] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1894.363658] env[62816]: value = "task-1789183" [ 1894.363658] env[62816]: _type = "Task" [ 1894.363658] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.371405] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789183, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.463728] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1894.503233] env[62816]: WARNING nova.network.neutron [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] 31f515cd-2053-4577-9ed5-9de5fe666946 already exists in list: networks containing: ['31f515cd-2053-4577-9ed5-9de5fe666946']. ignoring it [ 1894.679829] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.680249] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Instance network_info: |[{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1894.683571] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:fe:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '7731c29e-449a-4c40-bb70-5a2c88561abe', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1894.691349] env[62816]: DEBUG oslo.service.loopingcall [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.691582] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1894.692216] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a45e4247-b8a7-4471-801a-17e69612c300 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.713493] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1894.713493] env[62816]: value = "task-1789184" [ 1894.713493] env[62816]: _type = "Task" [ 1894.713493] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.726029] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789184, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.757596] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1894.795904] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1894.796963] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1894.797349] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1894.797701] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1894.797978] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1894.798282] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1894.798640] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1894.798930] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1894.799216] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1894.799490] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1894.799666] env[62816]: DEBUG nova.virt.hardware [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1894.800668] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578c8400-ad00-400b-9137-d7371878aa53 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.810176] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3570d231-08ec-4547-9fa4-507c95ab9d44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.881193] env[62816]: DEBUG nova.network.neutron [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55ff7fac-dd03-49d7-b99b-70da6123e691", "address": "fa:16:3e:37:62:e7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": 
{}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ff7fac-dd", "ovs_interfaceid": "55ff7fac-dd03-49d7-b99b-70da6123e691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.881193] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789183, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06469} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.881193] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1894.884044] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73aaa191-4134-4467-ab9c-1549b78439fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.887634] env[62816]: DEBUG nova.compute.manager [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Received event network-changed-7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1894.888144] env[62816]: DEBUG nova.compute.manager [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Refreshing instance network info cache due to event network-changed-7731c29e-449a-4c40-bb70-5a2c88561abe. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1894.888144] env[62816]: DEBUG oslo_concurrency.lockutils [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.888359] env[62816]: DEBUG oslo_concurrency.lockutils [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.888359] env[62816]: DEBUG nova.network.neutron [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Refreshing network info cache for port 7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1894.913719] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1894.915246] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c41c583-7c9a-49fc-b1a1-72bca1813441 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.937908] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1894.937908] env[62816]: value = "task-1789185" [ 1894.937908] env[62816]: _type = "Task" [ 1894.937908] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.947981] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789185, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.970446] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1894.970690] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.241s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.209668] env[62816]: DEBUG nova.network.neutron [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Successfully updated port: 850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1895.223764] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789184, 'name': CreateVM_Task, 'duration_secs': 0.3649} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.227095] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1895.227095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.227095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.227095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1895.227095] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bace7621-924b-4654-a77a-36b317ab363c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.231670] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1895.231670] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52630cd3-2a02-97cc-9e94-ca9ba2cbdc91" [ 1895.231670] env[62816]: _type = "Task" [ 1895.231670] env[62816]: 
} to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.240136] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52630cd3-2a02-97cc-9e94-ca9ba2cbdc91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.381833] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.382729] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.382930] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.383852] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aab31ee-8439-40ca-9ea4-abf09c27c95b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.402912] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1895.403181] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1895.403363] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1895.403567] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 
tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1895.403745] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1895.403886] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1895.404073] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1895.404257] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1895.404428] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1895.404699] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1895.404796] env[62816]: DEBUG nova.virt.hardware [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1895.411089] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Reconfiguring VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1895.411355] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7454c262-539a-443d-9ac8-34ed1e4ddf91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.431019] env[62816]: DEBUG oslo_vmware.api [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 
1895.431019] env[62816]: value = "task-1789186" [ 1895.431019] env[62816]: _type = "Task" [ 1895.431019] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.439445] env[62816]: DEBUG oslo_vmware.api [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789186, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.447290] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789185, 'name': ReconfigVM_Task, 'duration_secs': 0.259589} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.447580] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfigured VM instance instance-0000005c to attach disk [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b/e26b6593-7e64-4a43-b09d-92d2e668c25b.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1895.448782] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_format': None, 'device_type': 'disk', 'encryption_options': None, 'boot_index': 0, 'guest_format': None, 'size': 0, 'disk_bus': None, 'image_id': '844838ed-b150-482e-a0f6-dcce37470b52'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'device_type': None, 'boot_index': None, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'}, 'disk_bus': None, 'delete_on_termination': False, 'attachment_id': 'a6c1cbe3-6734-4930-8740-ec78c0040f9c', 'volume_type': None}], 'swap': None} {{(pid=62816) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1895.448914] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1895.449106] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1895.449875] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81de5d53-af7a-4828-bd60-34c57b926c44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.471545] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1175f5-f0eb-42b2-83a8-c1605b3cc19e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.475238] env[62816]: DEBUG nova.compute.manager [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-changed-55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1895.475428] env[62816]: DEBUG nova.compute.manager [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing instance network info cache due to event network-changed-55ff7fac-dd03-49d7-b99b-70da6123e691. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1895.475633] env[62816]: DEBUG oslo_concurrency.lockutils [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.475776] env[62816]: DEBUG oslo_concurrency.lockutils [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.475932] env[62816]: DEBUG nova.network.neutron [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing network info cache for port 55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1895.500940] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] volume-c253876c-91e9-4c8f-b674-798739e9b116/volume-c253876c-91e9-4c8f-b674-798739e9b116.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1895.501698] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-203488a3-1c2c-488c-8787-42f3334d877e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.568107] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1895.568107] env[62816]: value = "task-1789187" [ 1895.568107] env[62816]: _type = "Task" [ 1895.568107] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.578644] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789187, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.688306] env[62816]: DEBUG nova.network.neutron [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updated VIF entry in instance network info cache for port 7731c29e-449a-4c40-bb70-5a2c88561abe. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1895.688814] env[62816]: DEBUG nova.network.neutron [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1895.713133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.713133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.713133] env[62816]: DEBUG nova.network.neutron [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1895.742263] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52630cd3-2a02-97cc-9e94-ca9ba2cbdc91, 'name': SearchDatastore_Task, 'duration_secs': 0.009803} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.742600] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.742924] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.743686] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1895.743925] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1895.744144] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1895.744494] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbd570f6-bdeb-4b62-83ca-3df71afc523d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.752824] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1895.753509] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1895.754174] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72e23ae-7e96-4496-a75c-61ae323d2936 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.759201] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1895.759201] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5252aaa9-7db6-edde-77bf-3ba09559a954" [ 1895.759201] env[62816]: _type = "Task" [ 1895.759201] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.766624] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5252aaa9-7db6-edde-77bf-3ba09559a954, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.939681] env[62816]: DEBUG oslo_vmware.api [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.079794] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789187, 'name': ReconfigVM_Task, 'duration_secs': 0.33492} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.079794] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfigured VM instance instance-0000005c to attach disk [datastore1] volume-c253876c-91e9-4c8f-b674-798739e9b116/volume-c253876c-91e9-4c8f-b674-798739e9b116.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1896.084560] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4989332a-7a91-42d6-b82b-1c4dae152789 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.100782] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1896.100782] env[62816]: value = "task-1789188" [ 1896.100782] env[62816]: _type = "Task" [ 1896.100782] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.108900] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789188, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.192467] env[62816]: DEBUG oslo_concurrency.lockutils [req-55bb5112-68c9-4dbe-a419-ff194dc97a62 req-e7d0a79d-6f78-4fa9-a7ca-1b9d286629d7 service nova] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.201170] env[62816]: DEBUG nova.network.neutron [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updated VIF entry in instance network info cache for port 55ff7fac-dd03-49d7-b99b-70da6123e691. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1896.201668] env[62816]: DEBUG nova.network.neutron [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55ff7fac-dd03-49d7-b99b-70da6123e691", "address": "fa:16:3e:37:62:e7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": 
"nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ff7fac-dd", "ovs_interfaceid": "55ff7fac-dd03-49d7-b99b-70da6123e691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.245812] env[62816]: DEBUG nova.network.neutron [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1896.272724] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5252aaa9-7db6-edde-77bf-3ba09559a954, 'name': SearchDatastore_Task, 'duration_secs': 0.007884} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.273349] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17fda59e-0582-49a0-88aa-20034bc2c252 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.278862] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1896.278862] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526a167c-3c4d-9a06-8507-175ed9c37733" [ 1896.278862] env[62816]: _type = "Task" [ 1896.278862] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.286707] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526a167c-3c4d-9a06-8507-175ed9c37733, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.394068] env[62816]: DEBUG nova.network.neutron [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.439611] env[62816]: DEBUG oslo_vmware.api [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789186, 'name': ReconfigVM_Task, 'duration_secs': 0.679209} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.440119] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.440355] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Reconfigured VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1896.610463] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789188, 'name': ReconfigVM_Task, 'duration_secs': 0.139914} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.610770] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1896.611415] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6d24d016-8eb4-45d5-9510-4720d89df502 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.617926] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1896.617926] env[62816]: value = "task-1789189" [ 1896.617926] env[62816]: _type = "Task" [ 1896.617926] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.625638] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789189, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.704839] env[62816]: DEBUG oslo_concurrency.lockutils [req-9adc2f36-9e66-4cff-8627-b6ee682f031c req-87d2db49-2463-4b82-bd70-0723af99aa1e service nova] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.788902] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526a167c-3c4d-9a06-8507-175ed9c37733, 'name': SearchDatastore_Task, 'duration_secs': 0.009327} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.789183] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.789460] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1896.789715] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-077cb222-7551-4afd-afe0-446e85caac42 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.795435] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1896.795435] env[62816]: value = "task-1789190" [ 1896.795435] env[62816]: _type = "Task" [ 1896.795435] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.802826] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789190, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.897025] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1896.897121] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance network_info: |[{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1896.897488] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:e2:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '850c89e0-1047-4847-b1c9-d9fd0435045e', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1896.905445] env[62816]: DEBUG oslo.service.loopingcall [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1896.905674] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1896.905899] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1522dd0-9f84-4337-960f-0c36e1a74834 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.922712] env[62816]: DEBUG nova.compute.manager [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-vif-plugged-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.923058] env[62816]: DEBUG oslo_concurrency.lockutils [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.923415] env[62816]: DEBUG oslo_concurrency.lockutils [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.923737] env[62816]: DEBUG oslo_concurrency.lockutils [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.924053] env[62816]: DEBUG nova.compute.manager [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] No waiting events found dispatching network-vif-plugged-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1896.924350] env[62816]: WARNING nova.compute.manager [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received unexpected event network-vif-plugged-850c89e0-1047-4847-b1c9-d9fd0435045e for instance with vm_state building and task_state spawning. [ 1896.924595] env[62816]: DEBUG nova.compute.manager [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.924820] env[62816]: DEBUG nova.compute.manager [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing instance network info cache due to event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1896.925093] env[62816]: DEBUG oslo_concurrency.lockutils [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.925296] env[62816]: DEBUG oslo_concurrency.lockutils [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.925521] env[62816]: DEBUG nova.network.neutron [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1896.932230] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1896.932230] env[62816]: value = "task-1789191" [ 1896.932230] env[62816]: _type = "Task" [ 1896.932230] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.941070] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789191, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.944259] env[62816]: DEBUG oslo_concurrency.lockutils [None req-98767908-5f93-4330-ba58-bad91530fa50 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-55ff7fac-dd03-49d7-b99b-70da6123e691" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.373s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.111286] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1897.134986] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789189, 'name': Rename_Task, 'duration_secs': 0.147763} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.137047] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1897.137584] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-75d1c08b-d43d-48b0-88d5-c43c91b44e6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.146742] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1897.146742] env[62816]: value = "task-1789192" [ 1897.146742] env[62816]: _type = "Task" [ 1897.146742] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.156615] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789192, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.307661] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789190, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.442928] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789191, 'name': CreateVM_Task, 'duration_secs': 0.386477} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.443220] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1897.443763] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.444021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.444266] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1897.444513] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53d2d90f-1ea5-449c-9164-856fb67a3043 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.451051] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1897.451051] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523baa25-527a-f8cb-54c9-04b63319803c" [ 1897.451051] env[62816]: _type = "Task" [ 1897.451051] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.459650] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523baa25-527a-f8cb-54c9-04b63319803c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.658542] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789192, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.722511] env[62816]: DEBUG nova.network.neutron [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updated VIF entry in instance network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1897.722802] env[62816]: DEBUG nova.network.neutron [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.808742] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789190, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.961193] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523baa25-527a-f8cb-54c9-04b63319803c, 'name': SearchDatastore_Task, 'duration_secs': 0.011234} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.961828] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.962087] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1897.962366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1897.962524] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.962707] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1897.962972] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0201d21-03ed-46f7-8461-170567597702 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.971092] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1897.971317] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1897.971963] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e32539c6-3e78-4d33-af85-d6db1504680a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.977599] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1897.977599] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522965b2-c42f-0261-4e01-68aa3609151a" [ 1897.977599] env[62816]: _type = "Task" [ 1897.977599] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.984970] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522965b2-c42f-0261-4e01-68aa3609151a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.156654] env[62816]: DEBUG oslo_vmware.api [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789192, 'name': PowerOnVM_Task, 'duration_secs': 0.691768} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.156930] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1898.157168] env[62816]: DEBUG nova.compute.manager [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1898.157912] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ecb342-7bc1-4c96-8f19-0d475d7c5052 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.226145] env[62816]: DEBUG oslo_concurrency.lockutils [req-62c4ac14-db3c-4cad-83a8-dfd1f92db8da req-d99e9138-d7ff-42b3-a6f5-79018cf444f2 service nova] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.307537] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789190, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.332639] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-55ff7fac-dd03-49d7-b99b-70da6123e691" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.332892] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-55ff7fac-dd03-49d7-b99b-70da6123e691" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.487724] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522965b2-c42f-0261-4e01-68aa3609151a, 'name': SearchDatastore_Task, 'duration_secs': 0.007664} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.488528] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3430db9-4c8d-49b9-bd01-cc0187bd8ee7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.495671] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1898.495671] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c523d6-d6b4-c4d4-3e32-2315316b571b" [ 1898.495671] env[62816]: _type = "Task" [ 1898.495671] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.503750] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c523d6-d6b4-c4d4-3e32-2315316b571b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.676412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.676412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.676564] env[62816]: DEBUG nova.objects.instance [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1898.808698] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789190, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.524777} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.808698] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1898.809087] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1898.809087] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4c8ce2b3-c790-464e-b9e7-e759c296e7a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.815787] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1898.815787] env[62816]: value = "task-1789193" [ 1898.815787] env[62816]: _type = "Task" [ 1898.815787] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.823510] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789193, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.835184] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.835466] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.836621] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7ae2a1-877f-4db7-bfa5-793a38b8781b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.853743] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-362a8e99-6901-4c5b-8117-b5988754fff3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.879090] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Reconfiguring VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1898.879364] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41c3c39e-4cf6-4d5b-aa99-8ef9d741668f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.898384] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1898.898384] env[62816]: value = "task-1789194" [ 1898.898384] env[62816]: _type = "Task" [ 1898.898384] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.906138] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.005301] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52c523d6-d6b4-c4d4-3e32-2315316b571b, 'name': SearchDatastore_Task, 'duration_secs': 0.009373} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.005524] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.005785] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1899.006048] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfd5b861-8440-472c-85e5-b56b92e0e48f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.012439] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1899.012439] env[62816]: value = "task-1789195" [ 1899.012439] env[62816]: _type = "Task" [ 1899.012439] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.019531] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.331268] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789193, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054291} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.331561] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1899.332546] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed64753-5628-4575-8cea-37c825e9fc7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.355600] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1899.355953] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7784ce62-b8ce-4581-ab1f-56fcc794f4f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.375354] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1899.375354] env[62816]: value = "task-1789196" [ 1899.375354] env[62816]: _type = "Task" [ 1899.375354] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.383779] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.407660] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.522115] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.447535} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.522363] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1899.522584] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1899.522837] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-238da954-7ca4-4ef0-a040-88001bc35d1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.529544] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1899.529544] env[62816]: value = "task-1789197" [ 1899.529544] env[62816]: _type = "Task" [ 1899.529544] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.536957] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789197, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.686766] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ea41376-21ef-4a9e-bde2-d13548fbcb77 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.885397] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789196, 'name': ReconfigVM_Task, 'duration_secs': 0.368573} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.885718] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1899.886577] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69c71a6e-1a6c-42d7-8768-542f316ec851 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.892655] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1899.892655] env[62816]: value = "task-1789198" [ 1899.892655] env[62816]: _type = "Task" [ 1899.892655] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.902893] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789198, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.909449] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.042306] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058054} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.042676] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1900.043763] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5602840b-f399-4190-87ce-b1d9eb07eca6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.075038] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1900.075038] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd1bbee6-f63e-4132-b1e3-0916a3496680 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.094386] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1900.094386] env[62816]: value = "task-1789199" [ 1900.094386] env[62816]: _type = "Task" [ 1900.094386] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.101563] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789199, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.403136] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789198, 'name': Rename_Task, 'duration_secs': 0.211166} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.406150] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1900.406403] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-515c3d03-beb2-4f7a-b28c-e8295d65c68c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.414511] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.415753] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1900.415753] env[62816]: value = "task-1789200" [ 1900.415753] env[62816]: _type = "Task" [ 1900.415753] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.422897] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.604248] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789199, 'name': ReconfigVM_Task, 'duration_secs': 0.269961} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.604589] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1900.605173] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-066d0959-095f-4119-b006-2d49b974ceb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.610724] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1900.610724] env[62816]: value = "task-1789201" [ 1900.610724] env[62816]: _type = "Task" [ 1900.610724] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.618349] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789201, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.912899] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.924449] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789200, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.120203] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789201, 'name': Rename_Task, 'duration_secs': 0.138458} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.120501] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1901.120783] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6ab1119-2cbb-41a3-b4ce-b9011b0beb10 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.126700] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1901.126700] env[62816]: value = "task-1789202" [ 1901.126700] env[62816]: _type = "Task" [ 1901.126700] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.133841] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789202, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.414932] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.425282] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789200, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.637799] env[62816]: DEBUG oslo_vmware.api [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789202, 'name': PowerOnVM_Task, 'duration_secs': 0.437065} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.638230] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1901.638538] env[62816]: INFO nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Took 6.88 seconds to spawn the instance on the hypervisor. [ 1901.638806] env[62816]: DEBUG nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1901.639997] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cad95eb-75ab-4d01-89df-ab7423afd730 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.914872] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.925672] env[62816]: DEBUG oslo_vmware.api [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789200, 'name': PowerOnVM_Task, 'duration_secs': 1.307539} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.925924] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1901.926151] env[62816]: INFO nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Took 9.41 seconds to spawn the instance on the hypervisor. [ 1901.926338] env[62816]: DEBUG nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1901.927080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4060190-08ac-4683-af0c-0a32b433eb40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.156568] env[62816]: INFO nova.compute.manager [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Took 12.93 seconds to build instance. [ 1902.415878] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.444954] env[62816]: INFO nova.compute.manager [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Took 14.16 seconds to build instance. [ 1902.657664] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3a6d3514-e5c3-4f48-8b64-c81c16735481 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.440s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1902.721095] env[62816]: DEBUG nova.compute.manager [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1902.721198] env[62816]: DEBUG nova.compute.manager [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing instance network info cache due to event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1902.721412] env[62816]: DEBUG oslo_concurrency.lockutils [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.721550] env[62816]: DEBUG oslo_concurrency.lockutils [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.721821] env[62816]: DEBUG nova.network.neutron [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1902.916785] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.947454] env[62816]: DEBUG oslo_concurrency.lockutils [None req-78e6fe73-852e-407a-9932-3635e578311b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.665s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.189106] env[62816]: DEBUG nova.compute.manager [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Received event network-changed-7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1903.189325] env[62816]: DEBUG nova.compute.manager [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Refreshing instance network info cache due to event network-changed-7731c29e-449a-4c40-bb70-5a2c88561abe. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1903.189536] env[62816]: DEBUG oslo_concurrency.lockutils [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.189684] env[62816]: DEBUG oslo_concurrency.lockutils [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.189850] env[62816]: DEBUG nova.network.neutron [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Refreshing network info cache for port 7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1903.417054] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.483868] env[62816]: DEBUG nova.network.neutron [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updated VIF entry in instance network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1903.484265] env[62816]: DEBUG nova.network.neutron [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.909260] env[62816]: DEBUG nova.network.neutron [req-d39332ef-9555-440e-a41b-fcf995631f3d 
req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updated VIF entry in instance network info cache for port 7731c29e-449a-4c40-bb70-5a2c88561abe. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1903.909659] env[62816]: DEBUG nova.network.neutron [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.920402] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.986868] env[62816]: DEBUG oslo_concurrency.lockutils [req-9e396b2a-fe4b-434d-a16c-8f35014d71c8 req-3e7fffd1-f97f-4050-9ecc-1a89357e654c service nova] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.415601] env[62816]: DEBUG oslo_concurrency.lockutils [req-d39332ef-9555-440e-a41b-fcf995631f3d req-a7c7c767-dcd4-42c2-ac99-f1e542862bfd service nova] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.421538] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.921738] env[62816]: DEBUG oslo_vmware.api [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789194, 'name': ReconfigVM_Task, 'duration_secs': 5.753201} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.922144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.922211] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Reconfigured VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1906.050949] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.051307] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.051404] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.051598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.051766] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.054330] env[62816]: INFO nova.compute.manager [None 
req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Terminating instance [ 1906.056159] env[62816]: DEBUG nova.compute.manager [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1906.056358] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1906.057202] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd1c69c-7d17-459d-896c-036c431ccf28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.064621] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1906.064845] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3751712b-28a8-4142-b4c8-557b9e499a85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.070550] env[62816]: DEBUG oslo_vmware.api [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1906.070550] env[62816]: value = "task-1789203" [ 1906.070550] env[62816]: _type = "Task" [ 1906.070550] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.077887] env[62816]: DEBUG oslo_vmware.api [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789203, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.256980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.257222] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.257415] env[62816]: DEBUG nova.network.neutron [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1906.581094] env[62816]: DEBUG oslo_vmware.api [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789203, 'name': PowerOffVM_Task, 'duration_secs': 0.32077} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.581387] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1906.581606] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1906.581887] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-451167e7-f17c-4e14-a81d-5918c6977bee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.680454] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1906.680718] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1906.680916] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Deleting the datastore file [datastore1] 
37cb03ea-2e94-4466-89c0-2e3f7fdac076 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1906.681235] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5a9f47b-79b7-4f4a-83c4-3b3efc86dd6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.688826] env[62816]: DEBUG oslo_vmware.api [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for the task: (returnval){ [ 1906.688826] env[62816]: value = "task-1789205" [ 1906.688826] env[62816]: _type = "Task" [ 1906.688826] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.696384] env[62816]: DEBUG oslo_vmware.api [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789205, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.967731] env[62816]: INFO nova.network.neutron [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Port 55ff7fac-dd03-49d7-b99b-70da6123e691 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1906.968133] env[62816]: DEBUG nova.network.neutron [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.144857] env[62816]: DEBUG nova.compute.manager [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1907.145114] 
env[62816]: DEBUG nova.compute.manager [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing instance network info cache due to event network-changed-9f110684-506a-45d4-bf70-da542c84eeb8. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1907.145258] env[62816]: DEBUG oslo_concurrency.lockutils [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] Acquiring lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.201475] env[62816]: DEBUG oslo_vmware.api [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Task: {'id': task-1789205, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220329} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.201850] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1907.202174] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1907.202485] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1907.202770] env[62816]: INFO nova.compute.manager [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1907.203082] env[62816]: DEBUG oslo.service.loopingcall [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1907.203376] env[62816]: DEBUG nova.compute.manager [-] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1907.203516] env[62816]: DEBUG nova.network.neutron [-] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1907.471089] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.473904] env[62816]: DEBUG oslo_concurrency.lockutils [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] Acquired lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.474131] env[62816]: DEBUG nova.network.neutron [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Refreshing network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1907.645644] env[62816]: DEBUG nova.compute.manager [req-ce107344-9bf8-4668-975a-50be3bea62af req-09962f76-ecd1-4996-9bfd-bdf176836f65 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Received event network-vif-deleted-e9b1f47c-7b77-4707-a285-7130979eca9e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1907.645924] env[62816]: INFO nova.compute.manager [req-ce107344-9bf8-4668-975a-50be3bea62af req-09962f76-ecd1-4996-9bfd-bdf176836f65 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Neutron deleted interface e9b1f47c-7b77-4707-a285-7130979eca9e; detaching it from the instance and deleting it from the info cache [ 1907.646203] env[62816]: DEBUG nova.network.neutron [req-ce107344-9bf8-4668-975a-50be3bea62af req-09962f76-ecd1-4996-9bfd-bdf176836f65 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.824485] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-251b3ce3-06a4-40d4-ba18-a217650c9152-55ff7fac-dd03-49d7-b99b-70da6123e691" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.824776] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-251b3ce3-06a4-40d4-ba18-a217650c9152-55ff7fac-dd03-49d7-b99b-70da6123e691" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s 
{{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.825164] env[62816]: DEBUG nova.objects.instance [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'flavor' on Instance uuid 251b3ce3-06a4-40d4-ba18-a217650c9152 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1907.976526] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e83d9a9f-46d7-45c1-a6c2-f111648b9186 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-55ff7fac-dd03-49d7-b99b-70da6123e691" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.643s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.121011] env[62816]: DEBUG nova.network.neutron [-] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.148677] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-602c9bbc-3d46-4fc3-b114-0157d1b7f0af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.159538] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dea2794-e8a3-49fc-8fd3-292193747e4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.191420] env[62816]: DEBUG nova.compute.manager [req-ce107344-9bf8-4668-975a-50be3bea62af req-09962f76-ecd1-4996-9bfd-bdf176836f65 service nova] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Detach interface failed, port_id=e9b1f47c-7b77-4707-a285-7130979eca9e, reason: Instance 37cb03ea-2e94-4466-89c0-2e3f7fdac076 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1908.206271] env[62816]: DEBUG nova.network.neutron [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updated VIF entry in instance network info cache for port 9f110684-506a-45d4-bf70-da542c84eeb8. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1908.206636] env[62816]: DEBUG nova.network.neutron [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [{"id": "9f110684-506a-45d4-bf70-da542c84eeb8", "address": "fa:16:3e:c4:1a:de", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f110684-50", "ovs_interfaceid": "9f110684-506a-45d4-bf70-da542c84eeb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.441800] env[62816]: DEBUG nova.objects.instance [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'pci_requests' on Instance uuid 251b3ce3-06a4-40d4-ba18-a217650c9152 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1908.624088] env[62816]: INFO nova.compute.manager [-] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Took 1.42 seconds to deallocate network for instance. 
[ 1908.708877] env[62816]: DEBUG oslo_concurrency.lockutils [req-a93b852a-02cb-4b7b-be92-759103151d27 req-709fbe82-8493-43fa-b3a8-cfed5c48b3fb service nova] Releasing lock "refresh_cache-aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.944894] env[62816]: DEBUG nova.objects.base [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Object Instance<251b3ce3-06a4-40d4-ba18-a217650c9152> lazy-loaded attributes: flavor,pci_requests {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1908.945163] env[62816]: DEBUG nova.network.neutron [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1909.008294] env[62816]: DEBUG nova.policy [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '57edb89da85842f68f056aced5a5d4af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d3fae79b00d494daaadfee718781379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1909.131260] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.131606] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.131926] env[62816]: DEBUG nova.objects.instance [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lazy-loading 'resources' on Instance uuid 37cb03ea-2e94-4466-89c0-2e3f7fdac076 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1909.174199] env[62816]: DEBUG nova.compute.manager [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1909.174472] env[62816]: DEBUG nova.compute.manager [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing instance network info cache due to 
event network-changed-951e7a24-5179-43e6-b530-4769ba0ffdb4. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1909.174613] env[62816]: DEBUG oslo_concurrency.lockutils [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.174769] env[62816]: DEBUG oslo_concurrency.lockutils [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.174954] env[62816]: DEBUG nova.network.neutron [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1909.737347] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c75c4a-38fd-4089-a517-4d97ac011e56 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.746180] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-697cdacb-b1b1-4092-bbe8-c60dee00ffc5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.776939] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0db22aa-8436-43a1-be7b-a5446867c97c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.785797] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f7a41eb-8d32-4a7a-9c8d-3bdb4bb114ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.798454] env[62816]: DEBUG nova.compute.provider_tree [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.887395] env[62816]: DEBUG nova.network.neutron [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updated VIF entry in instance network info cache for port 951e7a24-5179-43e6-b530-4769ba0ffdb4. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1909.887764] env[62816]: DEBUG nova.network.neutron [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.302919] env[62816]: DEBUG nova.scheduler.client.report [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1910.390559] env[62816]: DEBUG oslo_concurrency.lockutils [req-5a802e7d-050b-4ba3-8ab1-bc964fac7ba2 req-dd4293ac-c959-4b75-aab0-3dc588b86249 service nova] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1910.624285] env[62816]: DEBUG nova.compute.manager [req-0b95c6ee-26cf-4626-b361-21fa7813805a req-c860ecdc-cb70-4eea-9694-af3e18dc4ac2 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-vif-plugged-55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1910.624498] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b95c6ee-26cf-4626-b361-21fa7813805a req-c860ecdc-cb70-4eea-9694-af3e18dc4ac2 service nova] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.624713] env[62816]: DEBUG 
oslo_concurrency.lockutils [req-0b95c6ee-26cf-4626-b361-21fa7813805a req-c860ecdc-cb70-4eea-9694-af3e18dc4ac2 service nova] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.624886] env[62816]: DEBUG oslo_concurrency.lockutils [req-0b95c6ee-26cf-4626-b361-21fa7813805a req-c860ecdc-cb70-4eea-9694-af3e18dc4ac2 service nova] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.625391] env[62816]: DEBUG nova.compute.manager [req-0b95c6ee-26cf-4626-b361-21fa7813805a req-c860ecdc-cb70-4eea-9694-af3e18dc4ac2 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] No waiting events found dispatching network-vif-plugged-55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1910.625654] env[62816]: WARNING nova.compute.manager [req-0b95c6ee-26cf-4626-b361-21fa7813805a req-c860ecdc-cb70-4eea-9694-af3e18dc4ac2 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received unexpected event network-vif-plugged-55ff7fac-dd03-49d7-b99b-70da6123e691 for instance with vm_state active and task_state None. [ 1910.738069] env[62816]: DEBUG nova.network.neutron [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Successfully updated port: 55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1910.808101] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.827958] env[62816]: INFO nova.scheduler.client.report [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Deleted allocations for instance 37cb03ea-2e94-4466-89c0-2e3f7fdac076 [ 1911.242028] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.242028] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.242028] env[62816]: DEBUG nova.network.neutron [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 
251b3ce3-06a4-40d4-ba18-a217650c9152] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1911.335248] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5cdf51c1-4682-4abe-997a-d1fcbabd27b4 tempest-ServersTestJSON-1116385564 tempest-ServersTestJSON-1116385564-project-member] Lock "37cb03ea-2e94-4466-89c0-2e3f7fdac076" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.284s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.640267] env[62816]: DEBUG oslo_concurrency.lockutils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.640548] env[62816]: DEBUG oslo_concurrency.lockutils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.778704] env[62816]: WARNING nova.network.neutron [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] 31f515cd-2053-4577-9ed5-9de5fe666946 already exists in list: networks containing: ['31f515cd-2053-4577-9ed5-9de5fe666946']. 
ignoring it [ 1912.049694] env[62816]: DEBUG nova.network.neutron [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55ff7fac-dd03-49d7-b99b-70da6123e691", "address": "fa:16:3e:37:62:e7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ff7fac-dd", "ovs_interfaceid": "55ff7fac-dd03-49d7-b99b-70da6123e691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.144105] env[62816]: DEBUG nova.compute.utils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1912.552328] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
1912.553087] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.553254] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.554197] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21eb762-5c0f-4d6f-939a-9f0a47274d74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.571483] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1912.571730] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1912.571932] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1912.572143] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1912.572300] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1912.572439] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1912.572679] env[62816]: DEBUG nova.virt.hardware [None 
req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1912.572851] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1912.573053] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1912.573236] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1912.573415] env[62816]: DEBUG nova.virt.hardware [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1912.579629] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Reconfiguring VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1912.579937] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-556959d8-2881-4b82-8029-ef86eb85775d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.597627] env[62816]: DEBUG oslo_vmware.api [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1912.597627] env[62816]: value = "task-1789206" [ 1912.597627] env[62816]: _type = "Task" [ 1912.597627] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.605456] env[62816]: DEBUG oslo_vmware.api [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789206, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.646383] env[62816]: DEBUG oslo_concurrency.lockutils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.109044] env[62816]: DEBUG oslo_vmware.api [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789206, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.246989] env[62816]: DEBUG nova.compute.manager [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-changed-55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1913.247344] env[62816]: DEBUG nova.compute.manager [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing instance network info cache due to event network-changed-55ff7fac-dd03-49d7-b99b-70da6123e691. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1913.247683] env[62816]: DEBUG oslo_concurrency.lockutils [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.247949] env[62816]: DEBUG oslo_concurrency.lockutils [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.248256] env[62816]: DEBUG nova.network.neutron [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Refreshing network info cache for port 55ff7fac-dd03-49d7-b99b-70da6123e691 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1913.608605] env[62816]: DEBUG oslo_vmware.api [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789206, 'name': ReconfigVM_Task, 'duration_secs': 0.705918} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.609124] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1913.609341] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Reconfigured VM to attach interface {{(pid=62816) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1913.721826] env[62816]: DEBUG oslo_concurrency.lockutils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.722133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.722380] env[62816]: INFO nova.compute.manager [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Attaching volume 3d0f33fb-fc65-4226-8a12-1c8d2e4636ac to /dev/sdb [ 1913.757490] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5ca6c5-f9eb-444b-85ca-6a9202ac6dd5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.765073] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a237de4-3077-4caa-bcc0-c61cfc0c9201 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.778759] env[62816]: DEBUG nova.virt.block_device [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updating existing volume attachment record: 8ea5ff85-14da-4da0-84bf-41c84c67fae5 {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1913.974843] env[62816]: DEBUG nova.network.neutron [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updated VIF entry in instance network info cache for port 55ff7fac-dd03-49d7-b99b-70da6123e691. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1913.975393] env[62816]: DEBUG nova.network.neutron [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "55ff7fac-dd03-49d7-b99b-70da6123e691", "address": "fa:16:3e:37:62:e7", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap55ff7fac-dd", "ovs_interfaceid": "55ff7fac-dd03-49d7-b99b-70da6123e691", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.116935] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e1ebe473-e243-4338-9488-1a95898c7592 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-251b3ce3-06a4-40d4-ba18-a217650c9152-55ff7fac-dd03-49d7-b99b-70da6123e691" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.292s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.479108] env[62816]: DEBUG oslo_concurrency.lockutils [req-f41166a9-a8f9-4106-8655-0eea3f3e190c req-fd39b20d-3f7e-443e-b461-c36532b07f19 service nova] Releasing lock 
"refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.527069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "interface-251b3ce3-06a4-40d4-ba18-a217650c9152-55ff7fac-dd03-49d7-b99b-70da6123e691" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.527424] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-251b3ce3-06a4-40d4-ba18-a217650c9152-55ff7fac-dd03-49d7-b99b-70da6123e691" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.030330] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1916.030532] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1916.031525] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705ce3bf-740c-47e3-a5fe-7f137a44767b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.050916] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1986dc9c-4bca-47c7-9eff-f7369c4d3781 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.077661] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Reconfiguring VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1916.078120] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b12680d7-ab28-46bf-931d-54405b49f5fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.097807] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1916.097807] env[62816]: value = "task-1789209" [ 1916.097807] env[62816]: _type = "Task" [ 1916.097807] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.105598] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.598040] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.598313] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.609429] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.100637] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1917.112814] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.612726] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.629606] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.629910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.631521] env[62816]: INFO nova.compute.claims [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1918.112956] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.322677] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1918.322927] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371189', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'name': 'volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6f0c72ab-1eaf-4db5-842f-b0ba75739e66', 'attached_at': '', 'detached_at': '', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'serial': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1918.323831] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba43cc0-d30e-4ab2-91fb-e23e029e4cf6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.340188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d546e927-9f66-4b28-9570-2464bf0b1552 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.364600] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac/volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1918.364828] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e7a19bcf-02cd-4245-aa11-95e0ea8b1fc9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.381179] env[62816]: DEBUG oslo_vmware.api [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1918.381179] env[62816]: value = "task-1789210" [ 1918.381179] env[62816]: _type = "Task" [ 1918.381179] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.388582] env[62816]: DEBUG oslo_vmware.api [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789210, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.613628] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.747062] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a57ea6d-052b-45d0-a1a4-dccebdc7714d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.755093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ca67c9-6f52-471a-97be-0ae3d49cdc1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.784630] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95435f93-6272-43bd-85bd-abc2a607b8fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.792188] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c984809d-43e6-429d-9ef5-d9387b325eef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.805131] env[62816]: DEBUG nova.compute.provider_tree [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.892703] env[62816]: DEBUG oslo_vmware.api [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789210, 'name': ReconfigVM_Task, 'duration_secs': 0.315987} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.892961] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Reconfigured VM instance instance-00000060 to attach disk [datastore1] volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac/volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1918.897541] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af184aa4-fbe3-4d28-9dde-46394a58536f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.912332] env[62816]: DEBUG oslo_vmware.api [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1918.912332] env[62816]: value = "task-1789211" [ 1918.912332] env[62816]: _type = "Task" [ 1918.912332] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.920229] env[62816]: DEBUG oslo_vmware.api [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789211, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.113537] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.307970] env[62816]: DEBUG nova.scheduler.client.report [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1919.421797] env[62816]: DEBUG oslo_vmware.api [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789211, 'name': ReconfigVM_Task, 'duration_secs': 0.127791} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.422067] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371189', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'name': 'volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6f0c72ab-1eaf-4db5-842f-b0ba75739e66', 'attached_at': '', 'detached_at': '', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'serial': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1919.614297] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.812963] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.183s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.813542] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1920.115008] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.318704] env[62816]: DEBUG nova.compute.utils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1920.320749] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1920.320749] env[62816]: DEBUG nova.network.neutron [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1920.358303] env[62816]: DEBUG nova.policy [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '72095e55e4e149bab7adc07e47c6c281', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7d623d459be54e85890461e933833908', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1920.458385] env[62816]: DEBUG nova.objects.instance [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'flavor' on Instance uuid 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1920.616409] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.625083] env[62816]: DEBUG nova.network.neutron [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Successfully created port: 8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1920.824114] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1920.963528] env[62816]: DEBUG oslo_concurrency.lockutils [None req-efb98cf0-858d-4eda-b957-a50c8541c1da tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.241s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.116611] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.182353] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.182670] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.617757] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.686262] env[62816]: INFO nova.compute.manager [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Detaching volume 3d0f33fb-fc65-4226-8a12-1c8d2e4636ac [ 1921.719277] env[62816]: INFO nova.virt.block_device [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Attempting to driver detach volume 3d0f33fb-fc65-4226-8a12-1c8d2e4636ac from mountpoint /dev/sdb [ 1921.719545] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1921.719774] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371189', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'name': 'volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6f0c72ab-1eaf-4db5-842f-b0ba75739e66', 'attached_at': '', 'detached_at': '', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'serial': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1921.720663] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee72b871-fdc6-445f-bf68-b1ad7c00018f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.742692] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc2f374-de8b-41b4-9ec7-f16432a99195 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.749602] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73e4600-b2af-4d93-8b57-a6f144fe8ca6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.769529] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6deb720-f13f-4dff-849a-196f36d51cb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.784388] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] The volume has not been displaced from its original location: [datastore1] volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac/volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1921.789538] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Reconfiguring VM instance instance-00000060 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1921.790010] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be615473-f18e-4dea-bd5b-67da35dec4d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.807994] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1921.807994] env[62816]: value = "task-1789212" [ 1921.807994] env[62816]: _type = "Task" [ 1921.807994] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.815703] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789212, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.834084] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1921.863114] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1921.863384] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1921.863546] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1921.863741] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1921.863995] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1921.864192] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1921.864412] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1921.864577] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1921.864751] env[62816]: DEBUG nova.virt.hardware [None 
req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1921.864919] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1921.865117] env[62816]: DEBUG nova.virt.hardware [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1921.865971] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10bc538e-9f06-40ec-90a4-b2c1157dfa34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.874101] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e8b5b51-6715-4876-9e8a-7f1e0a2e52ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.005325] env[62816]: DEBUG nova.compute.manager [req-76229396-2859-4c5b-8645-a886468aaa44 req-c1f54d07-afdd-4a11-860c-32032a1cb1b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Received event network-vif-plugged-8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1922.005547] env[62816]: DEBUG oslo_concurrency.lockutils [req-76229396-2859-4c5b-8645-a886468aaa44 req-c1f54d07-afdd-4a11-860c-32032a1cb1b3 service nova] Acquiring lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.005755] env[62816]: DEBUG oslo_concurrency.lockutils [req-76229396-2859-4c5b-8645-a886468aaa44 req-c1f54d07-afdd-4a11-860c-32032a1cb1b3 service nova] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.005927] env[62816]: DEBUG oslo_concurrency.lockutils [req-76229396-2859-4c5b-8645-a886468aaa44 req-c1f54d07-afdd-4a11-860c-32032a1cb1b3 service nova] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.006105] env[62816]: DEBUG nova.compute.manager [req-76229396-2859-4c5b-8645-a886468aaa44 req-c1f54d07-afdd-4a11-860c-32032a1cb1b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] No waiting events found dispatching network-vif-plugged-8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1922.006270] env[62816]: WARNING nova.compute.manager [req-76229396-2859-4c5b-8645-a886468aaa44 
req-c1f54d07-afdd-4a11-860c-32032a1cb1b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Received unexpected event network-vif-plugged-8f334780-4169-45ed-aac2-cf2be2b0b27a for instance with vm_state building and task_state spawning. [ 1922.088336] env[62816]: DEBUG nova.network.neutron [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Successfully updated port: 8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1922.117670] env[62816]: DEBUG oslo_vmware.api [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789209, 'name': ReconfigVM_Task, 'duration_secs': 5.818648} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.117878] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.118103] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Reconfigured VM to detach interface {{(pid=62816) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1922.317894] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789212, 'name': ReconfigVM_Task, 'duration_secs': 0.20302} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.318053] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Reconfigured VM instance instance-00000060 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1922.323108] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35fdbf6e-91d4-4eef-972f-56a3b960cd27 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.338103] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1922.338103] env[62816]: value = "task-1789213" [ 1922.338103] env[62816]: _type = "Task" [ 1922.338103] env[62816]: } to complete. 
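
The nova.virt.hardware lines above walk the CPU topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only viable sockets:cores:threads split is 1:1:1, hence "Got 1 possible topologies". The sketch below is only an illustration of that constraint (sockets * cores * threads == vcpus, each factor within its maximum), not Nova's implementation; the function name `possible_topologies` is hypothetical.

```python
# Illustrative sketch (not nova.virt.hardware): enumerate the (sockets, cores,
# threads) splits whose product equals the vCPU count, subject to per-factor
# maxima. For vcpus=1 this yields exactly one topology, 1:1:1, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" line.
from itertools import product

def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536, max_threads: int = 65536):
    for sockets, cores, threads in product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)]
```
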
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.345897] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789213, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.591066] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.591364] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquired lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.591620] env[62816]: DEBUG nova.network.neutron [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1922.848297] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789213, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.121955] env[62816]: DEBUG nova.network.neutron [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1923.244688] env[62816]: DEBUG nova.network.neutron [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updating instance_info_cache with network_info: [{"id": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "address": "fa:16:3e:67:b5:ca", "network": {"id": "6d86a457-3e10-4528-b3c0-c8a78e3408c1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-872358446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d623d459be54e85890461e933833908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f334780-41", "ovs_interfaceid": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.349030] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789213, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.386997] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1923.387227] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquired lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1923.387412] env[62816]: DEBUG nova.network.neutron [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1923.748744] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Releasing lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.748744] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Instance network_info: |[{"id": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "address": "fa:16:3e:67:b5:ca", "network": {"id": "6d86a457-3e10-4528-b3c0-c8a78e3408c1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-872358446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d623d459be54e85890461e933833908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f334780-41", "ovs_interfaceid": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1923.748744] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:b5:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'd177c5b3-a5b1-4c78-854e-7e0dbf341ea1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f334780-4169-45ed-aac2-cf2be2b0b27a', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1923.755503] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Creating folder: Project (7d623d459be54e85890461e933833908). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1923.755774] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdd3780c-7419-4819-ae2e-c0a4744808a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.766774] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Created folder: Project (7d623d459be54e85890461e933833908) in parent group-v370905. [ 1923.766963] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Creating folder: Instances. Parent ref: group-v371190. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1923.767205] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f27db2f2-1323-44a5-af46-69c9d22e5e6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.776587] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Created folder: Instances in parent group-v371190. [ 1923.776819] env[62816]: DEBUG oslo.service.loopingcall [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1923.777018] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1923.777222] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-364408e1-f9b8-45f2-82ae-cf8571d0f9ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.797271] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1923.797271] env[62816]: value = "task-1789216" [ 1923.797271] env[62816]: _type = "Task" [ 1923.797271] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.804809] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789216, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.847826] env[62816]: DEBUG oslo_vmware.api [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789213, 'name': ReconfigVM_Task, 'duration_secs': 1.128416} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.848183] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371189', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'name': 'volume-3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '6f0c72ab-1eaf-4db5-842f-b0ba75739e66', 'attached_at': '', 'detached_at': '', 'volume_id': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac', 'serial': '3d0f33fb-fc65-4226-8a12-1c8d2e4636ac'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1923.950210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.950562] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.950879] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1923.951199] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.951395] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.953959] env[62816]: INFO nova.compute.manager [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Terminating instance [ 1923.956135] env[62816]: DEBUG nova.compute.manager [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1923.956342] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1923.957289] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67e1c4e-ab9e-44b9-b832-4a52d0ec5d40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.967722] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1923.967974] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ec94d7b-fe76-4fd8-ba96-0e2ac2177aff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.974392] env[62816]: DEBUG oslo_vmware.api [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1923.974392] env[62816]: value = "task-1789217" [ 1923.974392] env[62816]: _type = "Task" [ 1923.974392] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.982508] env[62816]: DEBUG oslo_vmware.api [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789217, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.031726] env[62816]: DEBUG nova.compute.manager [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Received event network-changed-8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1924.031940] env[62816]: DEBUG nova.compute.manager [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Refreshing instance network info cache due to event network-changed-8f334780-4169-45ed-aac2-cf2be2b0b27a. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1924.032188] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] Acquiring lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.032359] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] Acquired lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.032575] env[62816]: DEBUG nova.network.neutron [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Refreshing network info cache for port 8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1924.122106] env[62816]: INFO nova.network.neutron [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Port 55ff7fac-dd03-49d7-b99b-70da6123e691 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
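
The network_info cache entries logged above and below for port 8f334780-4169-45ed-aac2-cf2be2b0b27a are ordinary lists of VIF dictionaries. A minimal, self-contained sketch of pulling the commonly needed debugging fields out of one such entry follows; the helper name `summarize_vif`, and loading the copied blob with `json.loads`, are assumptions for illustration and not Nova code.

```python
# Minimal sketch: reduce one cached VIF entry (as logged by
# update_instance_cache_with_nw_info) to port id, MAC, fixed IPs and MTU.
import json
from typing import Any

def summarize_vif(vif: dict[str, Any]) -> dict[str, Any]:
    """Return port id, MAC address, fixed IPs and MTU from one VIF dict."""
    fixed_ips = [
        ip["address"]
        for subnet in vif["network"]["subnets"]
        for ip in subnet["ips"]
        if ip.get("type") == "fixed"
    ]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "mtu": vif["network"]["meta"].get("mtu"),
        "ovs_interfaceid": vif.get("ovs_interfaceid"),
    }

# Usage against a network_info blob copied out of the log (truncated here):
cache_blob = '''[{"id": "8f334780-4169-45ed-aac2-cf2be2b0b27a",
                  "address": "fa:16:3e:67:b5:ca",
                  "ovs_interfaceid": "8f334780-4169-45ed-aac2-cf2be2b0b27a",
                  "network": {"meta": {"mtu": 8950},
                              "subnets": [{"ips": [{"address": "192.168.128.14",
                                                    "type": "fixed"}]}]}}]'''
for entry in json.loads(cache_blob):
    print(summarize_vif(entry))
```
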
[ 1924.122491] env[62816]: DEBUG nova.network.neutron [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [{"id": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "address": "fa:16:3e:08:ac:0c", "network": {"id": "31f515cd-2053-4577-9ed5-9de5fe666946", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-2022821168-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3d3fae79b00d494daaadfee718781379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap951e7a24-51", "ovs_interfaceid": "951e7a24-5179-43e6-b530-4769ba0ffdb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.307228] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789216, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.390542] env[62816]: DEBUG nova.objects.instance [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'flavor' on Instance uuid 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1924.483989] env[62816]: DEBUG oslo_vmware.api [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789217, 'name': PowerOffVM_Task, 'duration_secs': 0.182648} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.484411] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1924.484489] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1924.484672] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-39bb6705-c021-4b1a-8dcb-4df6270fe69a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.559502] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1924.559704] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1924.559884] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleting the datastore file [datastore1] 251b3ce3-06a4-40d4-ba18-a217650c9152 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1924.560157] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0eb4157-6f00-43b2-b05f-a3e81949f354 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.567508] env[62816]: DEBUG oslo_vmware.api [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1924.567508] env[62816]: value = "task-1789219" [ 1924.567508] env[62816]: _type = "Task" [ 1924.567508] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.574881] env[62816]: DEBUG oslo_vmware.api [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789219, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.625557] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Releasing lock "refresh_cache-251b3ce3-06a4-40d4-ba18-a217650c9152" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1924.767251] env[62816]: DEBUG nova.network.neutron [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updated VIF entry in instance network info cache for port 8f334780-4169-45ed-aac2-cf2be2b0b27a. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1924.767740] env[62816]: DEBUG nova.network.neutron [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updating instance_info_cache with network_info: [{"id": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "address": "fa:16:3e:67:b5:ca", "network": {"id": "6d86a457-3e10-4528-b3c0-c8a78e3408c1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-872358446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d623d459be54e85890461e933833908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f334780-41", "ovs_interfaceid": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1924.807150] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789216, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.079059] env[62816]: DEBUG oslo_vmware.api [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157529} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.079440] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1925.079584] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1925.079821] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1925.080050] env[62816]: INFO nova.compute.manager [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1925.080390] env[62816]: DEBUG oslo.service.loopingcall [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1925.080630] env[62816]: DEBUG nova.compute.manager [-] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1925.080753] env[62816]: DEBUG nova.network.neutron [-] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1925.129945] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3126525b-4258-477d-997b-9ed0e7616c03 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "interface-251b3ce3-06a4-40d4-ba18-a217650c9152-55ff7fac-dd03-49d7-b99b-70da6123e691" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.602s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.271460] env[62816]: DEBUG oslo_concurrency.lockutils [req-d3ac3c2d-c3ed-4163-8390-4b8b556ace84 req-f349d353-b11e-4a48-b189-a9a72ed9ce77 service nova] Releasing lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.308613] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789216, 'name': CreateVM_Task, 'duration_secs': 1.454168} completed successfully. 
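
Throughout this section, ReconfigVM_Task, CreateVM_Task, PowerOffVM_Task and DeleteDatastoreFile_Task are waited on by repeatedly polling the task and logging "progress is N%." until the task "completed successfully" with a duration_secs value. The loop below is a minimal standard-library stand-in for that pattern, not the oslo.vmware implementation; `TaskInfo` and `fetch_task_info` are hypothetical names.

```python
# Stand-in for the wait_for_task/_poll_task pattern seen in this log:
# poll a vCenter task, log its progress, and return the elapsed duration
# once it reports success.
import time
from dataclasses import dataclass
from typing import Callable

@dataclass
class TaskInfo:
    state: str          # "running", "success" or "error"
    progress: int       # percentage, as in "progress is 14%"
    error: str | None = None

def wait_for_task(task_id: str,
                  fetch_task_info: Callable[[str], TaskInfo],
                  poll_interval: float = 0.5) -> float:
    """Poll a task until it finishes; return the elapsed duration in seconds."""
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            return time.monotonic() - started     # reported as duration_secs
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(poll_interval)

# Example with a fake task that finishes on the third poll:
_states = iter([TaskInfo("running", 14), TaskInfo("running", 99),
                TaskInfo("success", 100)])
print(wait_for_task("task-1789216", lambda _tid: next(_states)))
```
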
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.308948] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1925.310075] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.311775] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.311775] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1925.311775] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6c6bb7f-72fd-429c-8cb9-24b898a6c81e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.317334] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1925.317334] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]529018c4-b020-42fe-387b-8055e5344657" [ 1925.317334] env[62816]: _type = "Task" [ 1925.317334] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.326332] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529018c4-b020-42fe-387b-8055e5344657, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.398113] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7913444b-ae45-449c-8299-f36c7dbea228 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.215s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.828227] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]529018c4-b020-42fe-387b-8055e5344657, 'name': SearchDatastore_Task, 'duration_secs': 0.014986} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.828761] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.828761] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1925.828946] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.829265] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.829477] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1925.829774] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95925639-8fcc-4aa3-bd15-bc5fa811822c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.837637] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1925.837818] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1925.838686] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdb6a6c6-7dce-4243-afb8-05ad935353ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.844049] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1925.844049] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5254da99-364b-9cd0-6d3f-8ef863e2c63e" [ 1925.844049] env[62816]: _type = "Task" [ 1925.844049] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.851495] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5254da99-364b-9cd0-6d3f-8ef863e2c63e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.060435] env[62816]: DEBUG nova.compute.manager [req-7a3a2d61-2181-4405-aeb7-539df5e9355e req-3e0e70e0-512c-4bb9-a3a5-96c376480c77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Received event network-vif-deleted-951e7a24-5179-43e6-b530-4769ba0ffdb4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1926.060628] env[62816]: INFO nova.compute.manager [req-7a3a2d61-2181-4405-aeb7-539df5e9355e req-3e0e70e0-512c-4bb9-a3a5-96c376480c77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Neutron deleted interface 951e7a24-5179-43e6-b530-4769ba0ffdb4; detaching it from the instance and deleting it from the info cache [ 1926.060877] env[62816]: DEBUG nova.network.neutron [req-7a3a2d61-2181-4405-aeb7-539df5e9355e req-3e0e70e0-512c-4bb9-a3a5-96c376480c77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.186042] env[62816]: DEBUG nova.network.neutron [-] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.355093] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5254da99-364b-9cd0-6d3f-8ef863e2c63e, 'name': SearchDatastore_Task, 'duration_secs': 0.008315} completed successfully. 
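
The image-cache handling above locks "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" and its ".vmdk", creates the "[datastore1] devstack-image-cache_base" folder via FileManager.MakeDirectory, and then checks for the cached disk with SearchDatastore_Task. A small sketch of how those datastore paths and lock names can be derived from an image id is shown below; the helper name `image_cache_paths` is hypothetical and this is an assumption-based illustration, not Nova's code.

```python
# Sketch: build the datastore folder, per-image folder and cached-VMDK paths
# that appear as lock names and search targets in the log above.
def image_cache_paths(datastore: str, cache_dir: str, image_id: str) -> dict[str, str]:
    folder = f"[{datastore}] {cache_dir}"
    image_folder = f"{folder}/{image_id}"
    return {
        "cache_folder": folder,                          # directory created if missing
        "image_folder": image_folder,                    # lock name for the cached image
        "vmdk_path": f"{image_folder}/{image_id}.vmdk",  # path probed before copying
    }

print(image_cache_paths("datastore1", "devstack-image-cache_base",
                        "844838ed-b150-482e-a0f6-dcce37470b52"))
```
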
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.355921] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b055922-caed-488e-9c46-37947fa22444 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.360738] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1926.360738] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5214b50c-8804-2ba9-2467-9a871fcdddd2" [ 1926.360738] env[62816]: _type = "Task" [ 1926.360738] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.367939] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5214b50c-8804-2ba9-2467-9a871fcdddd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.456267] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.456538] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.456750] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.456938] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.457202] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.459273] env[62816]: INFO nova.compute.manager [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Terminating instance [ 1926.460882] env[62816]: DEBUG nova.compute.manager [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1926.461114] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1926.461911] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9f50498-1b08-4b7e-9c88-3c7c8fb1a398 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.469409] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1926.469634] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd6bd971-6aa0-458e-8c1d-6dda141f13d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.478235] env[62816]: DEBUG oslo_vmware.api [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1926.478235] env[62816]: value = "task-1789220" [ 1926.478235] env[62816]: _type = "Task" [ 1926.478235] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.487242] env[62816]: DEBUG oslo_vmware.api [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789220, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.564217] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1e5bf11a-d3b6-431c-8cd3-a01aaecb8733 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.573596] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c3d804f-6a0f-4a48-a226-357b21dbb9e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.603895] env[62816]: DEBUG nova.compute.manager [req-7a3a2d61-2181-4405-aeb7-539df5e9355e req-3e0e70e0-512c-4bb9-a3a5-96c376480c77 service nova] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Detach interface failed, port_id=951e7a24-5179-43e6-b530-4769ba0ffdb4, reason: Instance 251b3ce3-06a4-40d4-ba18-a217650c9152 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1926.688463] env[62816]: INFO nova.compute.manager [-] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Took 1.61 seconds to deallocate network for instance. [ 1926.871967] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5214b50c-8804-2ba9-2467-9a871fcdddd2, 'name': SearchDatastore_Task, 'duration_secs': 0.008752} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.871967] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.871967] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] cb0a9fc4-6809-4ce9-9521-eb1a115493cf/cb0a9fc4-6809-4ce9-9521-eb1a115493cf.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1926.872235] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdc35abb-0946-4b90-98d8-4fe504d0d52d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.878422] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1926.878422] env[62816]: value = "task-1789221" [ 1926.878422] env[62816]: _type = "Task" [ 1926.878422] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.885650] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.988455] env[62816]: DEBUG oslo_vmware.api [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789220, 'name': PowerOffVM_Task, 'duration_secs': 0.168288} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.988684] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1926.988853] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1926.989128] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f2a8ae1-7c3f-4423-998f-94528af2ea69 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.059333] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1927.059561] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1927.059746] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleting the datastore file [datastore1] 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1927.060030] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-22b2f0b0-aac5-4e55-80bb-35a2dd575c77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.066425] env[62816]: DEBUG oslo_vmware.api [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for the task: (returnval){ [ 1927.066425] env[62816]: value = "task-1789223" [ 
1927.066425] env[62816]: _type = "Task" [ 1927.066425] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.074631] env[62816]: DEBUG oslo_vmware.api [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789223, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.195709] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.196147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.196244] env[62816]: DEBUG nova.objects.instance [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'resources' on Instance uuid 251b3ce3-06a4-40d4-ba18-a217650c9152 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1927.387898] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474321} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.388186] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] cb0a9fc4-6809-4ce9-9521-eb1a115493cf/cb0a9fc4-6809-4ce9-9521-eb1a115493cf.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1927.388402] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1927.388649] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ad3ac28-f3b7-4003-b85d-663a0214dc6f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.394341] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1927.394341] env[62816]: value = "task-1789224" [ 1927.394341] env[62816]: _type = "Task" [ 1927.394341] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.402511] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789224, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.576938] env[62816]: DEBUG oslo_vmware.api [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Task: {'id': task-1789223, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377477} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.577211] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1927.577403] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1927.577870] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1927.577870] env[62816]: INFO nova.compute.manager [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1927.578037] env[62816]: DEBUG oslo.service.loopingcall [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1927.578196] env[62816]: DEBUG nova.compute.manager [-] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1927.578291] env[62816]: DEBUG nova.network.neutron [-] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1927.815706] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5a352ae-db77-4849-a6a8-3c144e41e5c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.824443] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b753f9-aae0-422a-8a0b-daf800b656af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.859363] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34dd2005-910d-41f9-9b19-b95bfb0405f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.866420] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb4728e-5acd-4b70-bb6d-3557ba1d1782 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.880481] env[62816]: DEBUG nova.compute.provider_tree [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.904333] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789224, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060133} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.904578] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1927.905358] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833b72e1-de1c-44bb-b8a1-58f7b19a2815 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.927555] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] cb0a9fc4-6809-4ce9-9521-eb1a115493cf/cb0a9fc4-6809-4ce9-9521-eb1a115493cf.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1927.928096] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acdb2dec-6470-4614-95c9-b49bc99ecc2b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.948513] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1927.948513] env[62816]: value = "task-1789225" [ 1927.948513] env[62816]: _type = "Task" [ 1927.948513] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.957061] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789225, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.086806] env[62816]: DEBUG nova.compute.manager [req-07812433-a907-4687-aa88-587882e7d1ab req-e86da4cb-8301-45d4-adb2-95c685ac3f60 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Received event network-vif-deleted-102f4b78-99cb-46f4-9305-2bec7ba02d1d {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1928.086961] env[62816]: INFO nova.compute.manager [req-07812433-a907-4687-aa88-587882e7d1ab req-e86da4cb-8301-45d4-adb2-95c685ac3f60 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Neutron deleted interface 102f4b78-99cb-46f4-9305-2bec7ba02d1d; detaching it from the instance and deleting it from the info cache [ 1928.087150] env[62816]: DEBUG nova.network.neutron [req-07812433-a907-4687-aa88-587882e7d1ab req-e86da4cb-8301-45d4-adb2-95c685ac3f60 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.386405] env[62816]: DEBUG nova.scheduler.client.report [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1928.459224] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789225, 'name': ReconfigVM_Task, 'duration_secs': 0.303177} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.459478] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Reconfigured VM instance instance-00000064 to attach disk [datastore1] cb0a9fc4-6809-4ce9-9521-eb1a115493cf/cb0a9fc4-6809-4ce9-9521-eb1a115493cf.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1928.460056] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e8987b3-1fd8-4a27-830a-8979e92c5379 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.466654] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1928.466654] env[62816]: value = "task-1789226" [ 1928.466654] env[62816]: _type = "Task" [ 1928.466654] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.474135] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789226, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.519679] env[62816]: DEBUG nova.network.neutron [-] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.589905] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-115822b8-8875-4f52-ab14-e2f5e4c71eb6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.599409] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3380b70-8ffc-404b-a464-7c8ad4504e7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.627458] env[62816]: DEBUG nova.compute.manager [req-07812433-a907-4687-aa88-587882e7d1ab req-e86da4cb-8301-45d4-adb2-95c685ac3f60 service nova] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Detach interface failed, port_id=102f4b78-99cb-46f4-9305-2bec7ba02d1d, reason: Instance 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1928.891069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.911847] env[62816]: INFO nova.scheduler.client.report [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted allocations for instance 251b3ce3-06a4-40d4-ba18-a217650c9152 [ 1928.978206] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789226, 'name': Rename_Task, 'duration_secs': 0.142734} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.978649] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1928.979009] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4d1a854-31a5-4682-ae26-4ef58be31651 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.985985] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1928.985985] env[62816]: value = "task-1789227" [ 1928.985985] env[62816]: _type = "Task" [ 1928.985985] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.997126] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789227, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.022789] env[62816]: INFO nova.compute.manager [-] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Took 1.44 seconds to deallocate network for instance. [ 1929.419859] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1d8aa740-0a92-454a-be9b-3bab40f42898 tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "251b3ce3-06a4-40d4-ba18-a217650c9152" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.469s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1929.496194] env[62816]: DEBUG oslo_vmware.api [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789227, 'name': PowerOnVM_Task, 'duration_secs': 0.44627} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.496462] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1929.496680] env[62816]: INFO nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Took 7.66 seconds to spawn the instance on the hypervisor. 
[ 1929.496861] env[62816]: DEBUG nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1929.497627] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04b5c06d-464c-4cc8-883c-26d8efee8a5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.531560] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.531963] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.532327] env[62816]: DEBUG nova.objects.instance [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lazy-loading 'resources' on Instance uuid 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1930.014124] env[62816]: INFO nova.compute.manager [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Took 12.41 seconds to build instance. 
[ 1930.126462] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94cc5cb-7b47-49b9-bb50-4757f24ab523 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.134314] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87934c61-832c-4ea5-85a5-d6c040c3f43d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.164493] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b7f2d3-d9ef-49bc-9cee-782296ec0966 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.171836] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45421b15-c4c2-42fa-aca5-aab82387a7e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.184656] env[62816]: DEBUG nova.compute.provider_tree [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1930.516541] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c91a220c-099c-45fe-bbd8-8fb166e934d6 tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.918s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.522138] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.522379] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.522584] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.522799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock 
"aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.523021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.525182] env[62816]: INFO nova.compute.manager [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Terminating instance [ 1930.526950] env[62816]: DEBUG nova.compute.manager [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1930.527168] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1930.528023] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-936f3ebf-ef80-40a9-921c-63f54d1b8449 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.535312] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1930.535513] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9570b816-a5ab-464d-9393-3a549f31d33d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.541399] env[62816]: DEBUG oslo_vmware.api [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1930.541399] env[62816]: value = "task-1789228" [ 1930.541399] env[62816]: _type = "Task" [ 1930.541399] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.549109] env[62816]: DEBUG oslo_vmware.api [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789228, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.687670] env[62816]: DEBUG nova.scheduler.client.report [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1930.827471] env[62816]: DEBUG nova.compute.manager [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Received event network-changed-8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1930.827670] env[62816]: DEBUG nova.compute.manager [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Refreshing instance network info cache due to event network-changed-8f334780-4169-45ed-aac2-cf2be2b0b27a. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1930.827897] env[62816]: DEBUG oslo_concurrency.lockutils [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] Acquiring lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.828056] env[62816]: DEBUG oslo_concurrency.lockutils [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] Acquired lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.828227] env[62816]: DEBUG nova.network.neutron [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Refreshing network info cache for port 8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1931.051894] env[62816]: DEBUG oslo_vmware.api [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789228, 'name': PowerOffVM_Task, 'duration_secs': 0.176612} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.052127] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1931.052305] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1931.052553] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c20c0b7-532c-4386-8db5-ef9e06809392 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.187829] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1931.188078] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1931.188271] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleting the datastore file [datastore1] aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1931.188539] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c9c6dcf-b80d-412a-82a6-83c475351e25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.192539] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.196057] env[62816]: DEBUG oslo_vmware.api [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for the task: (returnval){ [ 1931.196057] env[62816]: value = "task-1789230" [ 1931.196057] env[62816]: _type = "Task" [ 1931.196057] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.203716] env[62816]: DEBUG oslo_vmware.api [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789230, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.226334] env[62816]: INFO nova.scheduler.client.report [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Deleted allocations for instance 6f0c72ab-1eaf-4db5-842f-b0ba75739e66 [ 1931.598899] env[62816]: DEBUG nova.network.neutron [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updated VIF entry in instance network info cache for port 8f334780-4169-45ed-aac2-cf2be2b0b27a. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1931.599319] env[62816]: DEBUG nova.network.neutron [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updating instance_info_cache with network_info: [{"id": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "address": "fa:16:3e:67:b5:ca", "network": {"id": "6d86a457-3e10-4528-b3c0-c8a78e3408c1", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-872358446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7d623d459be54e85890461e933833908", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d177c5b3-a5b1-4c78-854e-7e0dbf341ea1", "external-id": "nsx-vlan-transportzone-54", "segmentation_id": 54, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f334780-41", "ovs_interfaceid": "8f334780-4169-45ed-aac2-cf2be2b0b27a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.706214] env[62816]: DEBUG oslo_vmware.api [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Task: {'id': task-1789230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096869} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.706476] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1931.706698] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1931.706916] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1931.707243] env[62816]: INFO nova.compute.manager [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1931.707414] env[62816]: DEBUG oslo.service.loopingcall [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.707647] env[62816]: DEBUG nova.compute.manager [-] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1931.707747] env[62816]: DEBUG nova.network.neutron [-] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1931.734049] env[62816]: DEBUG oslo_concurrency.lockutils [None req-53893d50-b68a-491b-b668-512d96fd2343 tempest-AttachVolumeNegativeTest-1607224647 tempest-AttachVolumeNegativeTest-1607224647-project-member] Lock "6f0c72ab-1eaf-4db5-842f-b0ba75739e66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.277s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.102479] env[62816]: DEBUG oslo_concurrency.lockutils [req-573a02cc-6802-4a40-99d4-e77e8339d789 req-3ab31a85-d7bb-41c1-8046-68a55f4b4b8b service nova] Releasing lock "refresh_cache-cb0a9fc4-6809-4ce9-9521-eb1a115493cf" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.765108] env[62816]: DEBUG nova.network.neutron [-] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.851069] env[62816]: DEBUG nova.compute.manager [req-4b0ffe87-410c-4e71-aa77-d97ef0440b8a req-c4039eef-35da-475b-b550-a2e8a515355a service nova] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Received event network-vif-deleted-9f110684-506a-45d4-bf70-da542c84eeb8 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1933.267415] env[62816]: INFO nova.compute.manager [-] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Took 1.56 seconds to deallocate network for instance. 
[ 1933.774493] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.774863] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.774947] env[62816]: DEBUG nova.objects.instance [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lazy-loading 'resources' on Instance uuid aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.359806] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42af29b3-9e17-455e-b003-4b0a8997240d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.367174] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a526224-12ee-47f7-97d2-f2d9bc3c441d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.397482] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f64d12d-50a3-4619-abac-17c9686055ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.404762] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80023ef-017c-48f8-8298-370621fbbf66 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.417924] env[62816]: DEBUG nova.compute.provider_tree [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1934.921324] env[62816]: DEBUG nova.scheduler.client.report [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1935.004843] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 
tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.005116] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.426584] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.445188] env[62816]: INFO nova.scheduler.client.report [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Deleted allocations for instance aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d [ 1935.508219] env[62816]: INFO nova.compute.manager [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Detaching volume c253876c-91e9-4c8f-b674-798739e9b116 [ 1935.538707] env[62816]: INFO nova.virt.block_device [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Attempting to driver detach volume c253876c-91e9-4c8f-b674-798739e9b116 from mountpoint /dev/sdb [ 1935.538975] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1935.539184] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1935.540069] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494b81c3-44aa-447e-ad73-f3840e0edcc6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.562302] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a461be1-d08d-4b37-a362-9dbd0363ae01 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.569433] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa72a45-ddd6-437c-9458-78263a404da0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.589503] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f68ef45-5f30-4095-a312-c0861ecf7b57 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.603985] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] The volume has not been displaced from its original location: [datastore1] volume-c253876c-91e9-4c8f-b674-798739e9b116/volume-c253876c-91e9-4c8f-b674-798739e9b116.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1935.609104] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1935.609371] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75ad40fb-e7ac-4718-9600-9fef071efcd7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.626841] env[62816]: DEBUG oslo_vmware.api [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1935.626841] env[62816]: value = "task-1789233" [ 1935.626841] env[62816]: _type = "Task" [ 1935.626841] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.634207] env[62816]: DEBUG oslo_vmware.api [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.952729] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e4076c36-1b17-4515-b7bf-494de7b44fdd tempest-AttachInterfacesTestJSON-445942743 tempest-AttachInterfacesTestJSON-445942743-project-member] Lock "aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.430s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.136248] env[62816]: DEBUG oslo_vmware.api [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789233, 'name': ReconfigVM_Task, 'duration_secs': 0.209616} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.136525] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1936.141078] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bda42e2c-a607-4d34-8e2c-2b5b0a8a71ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.155326] env[62816]: DEBUG oslo_vmware.api [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1936.155326] env[62816]: value = "task-1789234" [ 1936.155326] env[62816]: _type = "Task" [ 1936.155326] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.162622] env[62816]: DEBUG oslo_vmware.api [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789234, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.665115] env[62816]: DEBUG oslo_vmware.api [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789234, 'name': ReconfigVM_Task, 'duration_secs': 0.147721} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.665446] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371185', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'name': 'volume-c253876c-91e9-4c8f-b674-798739e9b116', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e26b6593-7e64-4a43-b09d-92d2e668c25b', 'attached_at': '', 'detached_at': '', 'volume_id': 'c253876c-91e9-4c8f-b674-798739e9b116', 'serial': 'c253876c-91e9-4c8f-b674-798739e9b116'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1937.209027] env[62816]: DEBUG nova.objects.instance [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'flavor' on Instance uuid e26b6593-7e64-4a43-b09d-92d2e668c25b {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1938.221305] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5b2ad870-c599-4d86-8196-8cb521d928b6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.216s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.161732] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.161732] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.303310] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 
tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.303653] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.303993] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "e26b6593-7e64-4a43-b09d-92d2e668c25b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.304143] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.304365] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.306577] env[62816]: INFO nova.compute.manager [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Terminating instance [ 1939.308482] env[62816]: DEBUG nova.compute.manager [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1939.308712] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1939.309684] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f868ce06-3d8b-4f18-9d09-c8e71bf53118 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.317988] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1939.318244] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e06398a-caa9-43ca-a828-cf452db3c51f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.324219] env[62816]: DEBUG oslo_vmware.api [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1939.324219] env[62816]: value = "task-1789235" [ 1939.324219] env[62816]: _type = "Task" [ 1939.324219] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.332051] env[62816]: DEBUG oslo_vmware.api [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789235, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.384915] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.385199] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.385443] env[62816]: INFO nova.compute.manager [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Shelving [ 1939.667718] env[62816]: DEBUG nova.compute.utils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1939.835335] env[62816]: DEBUG oslo_vmware.api [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789235, 'name': PowerOffVM_Task, 'duration_secs': 0.253319} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.835680] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1939.835853] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1939.836171] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9cad4eaa-9e7d-420c-8253-380e5f2256d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.893313] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1939.893608] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4647b841-a01d-464e-888a-b8d8cc3095e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.900208] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1939.900208] env[62816]: value = "task-1789237" [ 1939.900208] env[62816]: _type = "Task" [ 1939.900208] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.907998] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.914974] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1939.915228] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1939.915414] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] e26b6593-7e64-4a43-b09d-92d2e668c25b {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1939.915670] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4e774f2-252a-49ca-94d5-76a64a9d989c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.921906] env[62816]: DEBUG oslo_vmware.api [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1939.921906] env[62816]: value = "task-1789238" [ 1939.921906] env[62816]: _type = "Task" [ 1939.921906] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.931318] env[62816]: DEBUG oslo_vmware.api [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789238, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.170253] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.410319] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789237, 'name': PowerOffVM_Task, 'duration_secs': 0.225595} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.410613] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1940.411403] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe09b21-0ac3-4277-b4e2-df781a3fb0e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.431864] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda4e830-158d-4aa8-aa3e-9a0c9006a536 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.445452] env[62816]: DEBUG oslo_vmware.api [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789238, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190989} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.446101] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1940.446352] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1940.446614] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1940.446853] env[62816]: INFO nova.compute.manager [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1940.447157] env[62816]: DEBUG oslo.service.loopingcall [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1940.447408] env[62816]: DEBUG nova.compute.manager [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1940.447544] env[62816]: DEBUG nova.network.neutron [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1940.873291] env[62816]: DEBUG nova.compute.manager [req-b7a0f0b8-85bd-482f-81e5-6e4cd85cc3e7 req-190ba576-116f-4016-b5ec-420695c4b9fb service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Received event network-vif-deleted-464c5ce0-30b5-473d-910e-343ba514ffa7 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1940.873532] env[62816]: INFO nova.compute.manager [req-b7a0f0b8-85bd-482f-81e5-6e4cd85cc3e7 req-190ba576-116f-4016-b5ec-420695c4b9fb service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Neutron deleted interface 464c5ce0-30b5-473d-910e-343ba514ffa7; detaching it from the instance and deleting it from the info cache [ 1940.873685] env[62816]: DEBUG nova.network.neutron [req-b7a0f0b8-85bd-482f-81e5-6e4cd85cc3e7 req-190ba576-116f-4016-b5ec-420695c4b9fb service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.947430] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1940.947768] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ec4de619-a2a0-4844-bd7d-502fc189657f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.955952] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1940.955952] env[62816]: value = "task-1789239" [ 1940.955952] env[62816]: _type = "Task" [ 1940.955952] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.964826] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789239, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.255651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.255651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.255651] env[62816]: INFO nova.compute.manager [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Attaching volume 2530eec9-9785-4b73-88ea-3c1e49f8f3c0 to /dev/sdb [ 1941.302411] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b12d5d-9e0a-44fe-8cdf-da0acaf95123 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.311975] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaa5965-f23a-494c-a24a-fc033b705f5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.330864] env[62816]: DEBUG nova.virt.block_device [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating existing volume attachment record: 77fa0985-6871-4d9a-b3b3-7f9b309ee61a {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1941.349208] env[62816]: DEBUG nova.network.neutron [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.376579] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8183d43-d110-4bb5-ab25-f3f667470e40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.389135] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef4a8e6-ac49-4759-8337-ecad474b80ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.421038] env[62816]: DEBUG nova.compute.manager [req-b7a0f0b8-85bd-482f-81e5-6e4cd85cc3e7 req-190ba576-116f-4016-b5ec-420695c4b9fb service nova] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Detach interface failed, port_id=464c5ce0-30b5-473d-910e-343ba514ffa7, reason: Instance e26b6593-7e64-4a43-b09d-92d2e668c25b could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1941.456145] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1941.465152] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789239, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.853220] env[62816]: INFO nova.compute.manager [-] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Took 1.41 seconds to deallocate network for instance. [ 1941.966863] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789239, 'name': CreateSnapshot_Task, 'duration_secs': 0.81873} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.966984] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1941.967752] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e11b16-cd0e-417e-92f5-3061be1c97ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.364840] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.365167] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.365574] env[62816]: DEBUG nova.objects.instance [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'resources' on Instance uuid e26b6593-7e64-4a43-b09d-92d2e668c25b {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1942.466089] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.466385] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.486220] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1942.486912] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-512d1f48-5de9-4d1d-bb42-368823b05f16 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.494772] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1942.494772] env[62816]: value = "task-1789243" [ 1942.494772] env[62816]: _type = "Task" [ 1942.494772] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.502830] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789243, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.964902] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fd9d12-da15-497c-a806-bcc31749ba4a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.968973] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1942.974835] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee60f46f-419f-4651-918a-d3c5cc017050 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.010148] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d6a700-eff3-48b4-9175-2732a32b68c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.021173] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5aa010-2f31-4c9d-8e0c-a90c19d59504 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.024888] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789243, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.035749] env[62816]: DEBUG nova.compute.provider_tree [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1943.457454] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1943.457597] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1943.489808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.516937] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789243, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.538282] env[62816]: DEBUG nova.scheduler.client.report [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1944.016919] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789243, 'name': CloneVM_Task} progress is 95%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.042992] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.678s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.045380] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.556s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.046867] env[62816]: INFO nova.compute.claims [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1944.063815] env[62816]: INFO nova.scheduler.client.report [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted allocations for instance e26b6593-7e64-4a43-b09d-92d2e668c25b [ 1944.516937] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789243, 'name': CloneVM_Task, 'duration_secs': 1.610672} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.517277] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Created linked-clone VM from snapshot [ 1944.518089] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4969dd-f027-430a-b217-f21ff0cf53bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.525696] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Uploading image 6576f905-3e87-412b-9611-f955fc53c2e8 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1944.551598] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1944.551598] env[62816]: value = "vm-371196" [ 1944.551598] env[62816]: _type = "VirtualMachine" [ 1944.551598] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1944.553931] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f8fb5f69-9caa-455b-9ea1-10c0494e99f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.561993] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease: (returnval){ [ 1944.561993] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237359d-caa6-4aaa-b192-0528dc0b3a46" [ 1944.561993] env[62816]: _type = "HttpNfcLease" [ 1944.561993] env[62816]: } obtained for exporting VM: (result){ [ 1944.561993] env[62816]: value = "vm-371196" [ 1944.561993] env[62816]: _type = "VirtualMachine" [ 1944.561993] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1944.562301] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the lease: (returnval){ [ 1944.562301] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237359d-caa6-4aaa-b192-0528dc0b3a46" [ 1944.562301] env[62816]: _type = "HttpNfcLease" [ 1944.562301] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1944.572544] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1944.572544] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237359d-caa6-4aaa-b192-0528dc0b3a46" [ 1944.572544] env[62816]: _type = "HttpNfcLease" [ 1944.572544] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1944.573165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c474bdaa-b70b-4a85-b44e-82e28c99e9f8 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "e26b6593-7e64-4a43-b09d-92d2e668c25b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.269s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.071147] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1945.071147] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237359d-caa6-4aaa-b192-0528dc0b3a46" [ 1945.071147] env[62816]: _type = "HttpNfcLease" [ 1945.071147] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1945.071540] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1945.071540] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5237359d-caa6-4aaa-b192-0528dc0b3a46" [ 1945.071540] env[62816]: _type = "HttpNfcLease" [ 1945.071540] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1945.072319] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe686f48-0d6a-41b0-abdf-55485e723378 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.082837] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52508dba-c637-3978-88be-0357dcdb4462/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1945.082837] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52508dba-c637-3978-88be-0357dcdb4462/disk-0.vmdk for reading. 
{{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1945.173845] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b22542ad-6b2a-4048-b9a3-c70ddd8f5080 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.190444] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5053ac-7182-408e-9915-a5c29c254b94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.204893] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f4c384-f948-4a0c-a93a-1e70be87583e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.237442] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a907f5c-60bd-4b53-b6a6-50580f433f1b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.244939] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1478e86-f7ec-4cd0-b4d6-de63d3a121ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.258198] env[62816]: DEBUG nova.compute.provider_tree [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1945.469286] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1945.469540] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.469708] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.469880] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.470038] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1945.470192] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1945.761807] env[62816]: DEBUG nova.scheduler.client.report [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1945.973050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.127397] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.127754] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.269088] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.269792] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1946.273558] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.300s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.273865] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.274423] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1946.275944] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21362f40-305b-4077-958d-5a63587a2f94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.285997] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a1485c-83a9-4e06-a0bf-f317ac31097e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.302965] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66ec799-0e95-4cc9-b6b5-af8c5a20f5fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.310487] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68b8c8f-b550-45f5-bdce-513dd4619361 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.343723] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179936MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1946.343950] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.344189] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.389460] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1946.389708] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371195', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'name': 'volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '341bf195-e528-4e3b-8636-fac7a383d228', 'attached_at': '', 'detached_at': '', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'serial': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1946.390615] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672501e4-a8b9-4522-b780-783cbf5956d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.408417] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd159b26-e101-406d-8ce3-77125bcefb44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.433092] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0/volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1946.433494] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa8a0ae8-0e52-447c-b5bf-24b1746cdb2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.452824] env[62816]: DEBUG oslo_vmware.api [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1946.452824] env[62816]: value = "task-1789246" [ 1946.452824] env[62816]: _type = "Task" [ 1946.452824] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.461227] env[62816]: DEBUG oslo_vmware.api [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789246, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.630640] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1946.776763] env[62816]: DEBUG nova.compute.utils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1946.778336] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1946.778581] env[62816]: DEBUG nova.network.neutron [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1946.828152] env[62816]: DEBUG nova.policy [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c5023a18d243b7aafb6d6181f931d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e070d1729247ff83b4ff6997b45385', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1946.962017] env[62816]: DEBUG oslo_vmware.api [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789246, 'name': ReconfigVM_Task, 'duration_secs': 0.369494} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.962415] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0/volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1946.967173] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a22a8b4-a888-44ba-a392-3d7a97df6782 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.983463] env[62816]: DEBUG oslo_vmware.api [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1946.983463] env[62816]: value = "task-1789247" [ 1946.983463] env[62816]: _type = "Task" [ 1946.983463] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.992957] env[62816]: DEBUG oslo_vmware.api [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789247, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.079862] env[62816]: DEBUG nova.network.neutron [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Successfully created port: 4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1947.153771] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.282297] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1947.374532] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4ab07a21-2685-42bc-af13-b95473993d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1947.374802] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 341bf195-e528-4e3b-8636-fac7a383d228 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1947.374978] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5b87e09d-ae08-4936-8479-c845e25b31b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1947.375168] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance cb0a9fc4-6809-4ce9-9521-eb1a115493cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1947.375397] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1947.493631] env[62816]: DEBUG oslo_vmware.api [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789247, 'name': ReconfigVM_Task, 'duration_secs': 0.140045} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.494049] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371195', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'name': 'volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '341bf195-e528-4e3b-8636-fac7a383d228', 'attached_at': '', 'detached_at': '', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'serial': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1947.879673] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5e76d63c-b05c-4e8b-8b90-6110bd7d654c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1947.880150] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1947.880150] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1947.967712] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5c208b-a353-4e9a-803c-7e982c74a9ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.976815] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8291df8a-87fc-41b2-aaf7-4f10ca5c9afe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.009746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eea9b522-8b50-430e-9a3f-be639f4e75a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.017571] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1a1014-2dbe-4bfb-a2fc-74d4590acab5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.031182] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1948.291716] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1948.317704] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1948.317950] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1948.318129] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1948.318317] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1948.318465] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1948.318615] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1948.318823] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1948.318984] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1948.319168] env[62816]: DEBUG nova.virt.hardware [None 
req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1948.319332] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1948.319508] env[62816]: DEBUG nova.virt.hardware [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1948.320454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc9715b-6ba1-4e13-a754-d772887665ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.330175] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b93e7f-bd41-4f0c-8eb8-0ff8014abcb0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.536586] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1948.545146] env[62816]: DEBUG nova.objects.instance [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'flavor' on Instance uuid 341bf195-e528-4e3b-8636-fac7a383d228 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1948.556331] env[62816]: DEBUG nova.compute.manager [req-88fa4436-a777-47df-898e-72b3bf255232 req-15921e39-1f25-4e5d-99db-63c7b43dae9b service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Received event network-vif-plugged-4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1948.556331] env[62816]: DEBUG oslo_concurrency.lockutils [req-88fa4436-a777-47df-898e-72b3bf255232 req-15921e39-1f25-4e5d-99db-63c7b43dae9b service nova] Acquiring lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.556331] env[62816]: DEBUG oslo_concurrency.lockutils [req-88fa4436-a777-47df-898e-72b3bf255232 req-15921e39-1f25-4e5d-99db-63c7b43dae9b service nova] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.556331] env[62816]: DEBUG oslo_concurrency.lockutils [req-88fa4436-a777-47df-898e-72b3bf255232 req-15921e39-1f25-4e5d-99db-63c7b43dae9b service nova] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.556331] env[62816]: DEBUG nova.compute.manager [req-88fa4436-a777-47df-898e-72b3bf255232 req-15921e39-1f25-4e5d-99db-63c7b43dae9b service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] No waiting events found dispatching network-vif-plugged-4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1948.556331] env[62816]: WARNING nova.compute.manager [req-88fa4436-a777-47df-898e-72b3bf255232 req-15921e39-1f25-4e5d-99db-63c7b43dae9b service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Received unexpected event network-vif-plugged-4af57358-64d9-46b0-9d94-dad516307565 for instance with vm_state building and task_state spawning. [ 1948.639430] env[62816]: DEBUG nova.network.neutron [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Successfully updated port: 4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1949.046068] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1949.046355] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.702s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.046584] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.893s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.048207] env[62816]: INFO nova.compute.claims [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1949.052860] env[62816]: DEBUG oslo_concurrency.lockutils [None req-4d131f5c-2cbc-4e4e-b629-a6ef3db7de1c tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.799s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.141725] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.141931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.142189] env[62816]: DEBUG nova.network.neutron [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1949.675333] env[62816]: DEBUG nova.network.neutron [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1949.811704] env[62816]: DEBUG nova.network.neutron [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Updating instance_info_cache with network_info: [{"id": "4af57358-64d9-46b0-9d94-dad516307565", "address": "fa:16:3e:df:bc:47", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4af57358-64", "ovs_interfaceid": "4af57358-64d9-46b0-9d94-dad516307565", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.150611] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dba7d33-74e2-4cd0-88ee-871e78ca8749 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.157995] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4ca80605-e366-4a17-9d39-ab01b51fcef7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.190318] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5d853d-5a68-47cb-9f1a-7de7c7adefb2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.198125] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb09f6c-9e37-4087-96dc-48ebd49fa01e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.211748] env[62816]: DEBUG nova.compute.provider_tree [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1950.314580] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.315032] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Instance network_info: |[{"id": "4af57358-64d9-46b0-9d94-dad516307565", "address": "fa:16:3e:df:bc:47", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4af57358-64", "ovs_interfaceid": "4af57358-64d9-46b0-9d94-dad516307565", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1950.315556] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:bc:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4af57358-64d9-46b0-9d94-dad516307565', 
'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1950.323904] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating folder: Project (47e070d1729247ff83b4ff6997b45385). Parent ref: group-v370905. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1950.324274] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a38b16fc-df9e-4be4-8b06-3cbb95c09b0e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.335545] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created folder: Project (47e070d1729247ff83b4ff6997b45385) in parent group-v370905. [ 1950.335771] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating folder: Instances. Parent ref: group-v371197. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1950.336071] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae856b3d-82cd-414a-8d20-596c48349a34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.346063] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created folder: Instances in parent group-v371197. [ 1950.346221] env[62816]: DEBUG oslo.service.loopingcall [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.346415] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1950.346673] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9982adfc-18e7-4203-a173-1af81504b43a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.368427] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1950.368427] env[62816]: value = "task-1789250" [ 1950.368427] env[62816]: _type = "Task" [ 1950.368427] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.376218] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789250, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.417948] env[62816]: DEBUG nova.compute.manager [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1950.582668] env[62816]: DEBUG nova.compute.manager [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Received event network-changed-4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1950.582802] env[62816]: DEBUG nova.compute.manager [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Refreshing instance network info cache due to event network-changed-4af57358-64d9-46b0-9d94-dad516307565. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1950.583103] env[62816]: DEBUG oslo_concurrency.lockutils [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] Acquiring lock "refresh_cache-fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.583247] env[62816]: DEBUG oslo_concurrency.lockutils [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] Acquired lock "refresh_cache-fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.583465] env[62816]: DEBUG nova.network.neutron [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Refreshing network info cache for port 4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1950.715272] env[62816]: DEBUG nova.scheduler.client.report [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1950.880124] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789250, 'name': CreateVM_Task, 'duration_secs': 0.365693} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.880306] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1950.880988] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.881193] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.881554] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1950.881828] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fd16813-7d56-400e-bdc7-9c3070cef713 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.886399] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1950.886399] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5281be92-21bc-c288-560b-d4be5acafd69" [ 1950.886399] env[62816]: _type = "Task" [ 1950.886399] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.893984] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281be92-21bc-c288-560b-d4be5acafd69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.938937] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.036733] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.037013] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.037185] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1951.220656] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.221225] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1951.224041] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.285s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.300453] env[62816]: DEBUG nova.network.neutron [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Updated VIF entry in instance network info cache for port 4af57358-64d9-46b0-9d94-dad516307565. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1951.300854] env[62816]: DEBUG nova.network.neutron [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Updating instance_info_cache with network_info: [{"id": "4af57358-64d9-46b0-9d94-dad516307565", "address": "fa:16:3e:df:bc:47", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4af57358-64", "ovs_interfaceid": "4af57358-64d9-46b0-9d94-dad516307565", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.396857] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281be92-21bc-c288-560b-d4be5acafd69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.727498] env[62816]: DEBUG nova.compute.utils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1951.728962] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1951.729142] env[62816]: DEBUG nova.network.neutron [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1951.733241] env[62816]: INFO nova.compute.claims [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1951.801974] env[62816]: DEBUG nova.policy [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0a2129bc83a45d695730796b55f1caf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72d49b085afa4df99700ea4e15e9c87e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1951.803764] env[62816]: DEBUG oslo_concurrency.lockutils [req-a94e3e2a-019b-4973-b7e9-3555660189c1 req-62fe3f74-63df-47c5-88f2-084502da6c99 service nova] Releasing lock "refresh_cache-fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.899202] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281be92-21bc-c288-560b-d4be5acafd69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.065148] env[62816]: DEBUG nova.network.neutron [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Successfully created port: 738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1952.237536] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1952.241514] env[62816]: INFO nova.compute.resource_tracker [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating resource usage from migration 090d0a64-f039-4520-a304-542d76707944 [ 1952.347038] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0035cd2f-b52f-4160-a912-15d4f00ac941 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.354445] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63587605-9453-4611-8c4e-27e9f2e4ba0a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.385472] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece41f71-5c6a-4440-8328-4a30f90b9182 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.395646] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabd66ee-faa1-4ec7-930c-e81f11f8c0bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.402156] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281be92-21bc-c288-560b-d4be5acafd69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.412779] env[62816]: DEBUG nova.compute.provider_tree [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1952.683011] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52508dba-c637-3978-88be-0357dcdb4462/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1952.684328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142add84-6ddc-4e27-bffa-ac33212330a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.690707] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52508dba-c637-3978-88be-0357dcdb4462/disk-0.vmdk is in state: ready. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1952.690903] env[62816]: ERROR oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52508dba-c637-3978-88be-0357dcdb4462/disk-0.vmdk due to incomplete transfer. [ 1952.691174] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d3e41b35-3860-43ec-85a8-398d9bfb5a47 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.698759] env[62816]: DEBUG oslo_vmware.rw_handles [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52508dba-c637-3978-88be-0357dcdb4462/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1952.699184] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Uploaded image 6576f905-3e87-412b-9611-f955fc53c2e8 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1952.701714] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1952.701943] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-9e75ef92-934c-4806-adc9-575ff2333b09 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.708011] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1952.708011] env[62816]: value = "task-1789251" [ 1952.708011] env[62816]: _type = "Task" [ 1952.708011] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.715896] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789251, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.900522] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5281be92-21bc-c288-560b-d4be5acafd69, 'name': SearchDatastore_Task, 'duration_secs': 1.980316} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.900848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.901105] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1952.901342] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.901504] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.901677] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1952.901947] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d111da41-1004-4701-8670-c8f77e644818 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.910598] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1952.910782] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1952.911535] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdff3ce6-347e-4823-9991-a214f9dd80e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.916193] env[62816]: DEBUG nova.scheduler.client.report [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1952.920385] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1952.920385] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523a26aa-5476-6328-3cf5-2ffbe3246565" [ 1952.920385] env[62816]: _type = "Task" [ 1952.920385] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.928474] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523a26aa-5476-6328-3cf5-2ffbe3246565, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.217344] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789251, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.250965] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1953.274545] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1953.274828] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1953.275025] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1953.275245] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1953.275415] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1953.275582] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1953.275870] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1953.276078] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1953.276275] env[62816]: DEBUG 
nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1953.276472] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1953.276694] env[62816]: DEBUG nova.virt.hardware [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1953.277677] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5e8f19-459f-4430-8fc0-aeb9ae589b4c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.286246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2531816-16d3-4b26-ad73-6da494496dac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.422221] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.198s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.422454] env[62816]: INFO nova.compute.manager [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Migrating [ 1953.446047] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523a26aa-5476-6328-3cf5-2ffbe3246565, 'name': SearchDatastore_Task, 'duration_secs': 0.010195} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.446874] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-540cd82d-488b-4a2c-b2f0-8fcbd49df0ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.452727] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1953.452727] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520f5ec2-f5b2-4e3f-a809-4906fb30726a" [ 1953.452727] env[62816]: _type = "Task" [ 1953.452727] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.458106] env[62816]: DEBUG nova.compute.manager [req-4a5b0a8f-9e55-4e1f-ad16-0a4325329b67 req-a66cf818-47da-4220-a11a-638dbc3fe626 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Received event network-vif-plugged-738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1953.458316] env[62816]: DEBUG oslo_concurrency.lockutils [req-4a5b0a8f-9e55-4e1f-ad16-0a4325329b67 req-a66cf818-47da-4220-a11a-638dbc3fe626 service nova] Acquiring lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.458542] env[62816]: DEBUG oslo_concurrency.lockutils [req-4a5b0a8f-9e55-4e1f-ad16-0a4325329b67 req-a66cf818-47da-4220-a11a-638dbc3fe626 service nova] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.458737] env[62816]: DEBUG oslo_concurrency.lockutils [req-4a5b0a8f-9e55-4e1f-ad16-0a4325329b67 req-a66cf818-47da-4220-a11a-638dbc3fe626 service nova] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.458936] env[62816]: DEBUG nova.compute.manager [req-4a5b0a8f-9e55-4e1f-ad16-0a4325329b67 req-a66cf818-47da-4220-a11a-638dbc3fe626 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] No waiting events found dispatching network-vif-plugged-738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1953.459121] env[62816]: WARNING nova.compute.manager [req-4a5b0a8f-9e55-4e1f-ad16-0a4325329b67 req-a66cf818-47da-4220-a11a-638dbc3fe626 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Received unexpected event network-vif-plugged-738d894d-6a65-4c5d-891f-2c14246c82f4 for instance with vm_state building and task_state spawning. [ 1953.464947] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520f5ec2-f5b2-4e3f-a809-4906fb30726a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.540976] env[62816]: DEBUG nova.network.neutron [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Successfully updated port: 738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1953.719043] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789251, 'name': Destroy_Task, 'duration_secs': 0.572615} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.719337] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Destroyed the VM [ 1953.719594] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1953.720157] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a00f08ca-ebe3-4664-ac6b-de44999aa4eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.725947] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1953.725947] env[62816]: value = "task-1789252" [ 1953.725947] env[62816]: _type = "Task" [ 1953.725947] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.733579] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789252, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.941108] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.941347] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.941552] env[62816]: DEBUG nova.network.neutron [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1953.969128] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520f5ec2-f5b2-4e3f-a809-4906fb30726a, 'name': SearchDatastore_Task, 'duration_secs': 0.012362} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.970194] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.970485] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4/fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1953.970770] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff93f6a5-6d6a-4de0-a26f-96c6543fca17 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.978070] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1953.978070] env[62816]: value = "task-1789253" [ 1953.978070] env[62816]: _type = "Task" [ 1953.978070] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.986850] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789253, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.044020] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.044192] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.044345] env[62816]: DEBUG nova.network.neutron [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1954.236290] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789252, 'name': RemoveSnapshot_Task, 'duration_secs': 0.314008} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.236685] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1954.237105] env[62816]: DEBUG nova.compute.manager [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1954.237979] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2fd3732-e252-4051-a811-86f1a77195dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.487560] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789253, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.576127] env[62816]: DEBUG nova.network.neutron [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1954.666051] env[62816]: DEBUG nova.network.neutron [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.703804] env[62816]: DEBUG nova.network.neutron [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updating instance_info_cache with network_info: [{"id": "738d894d-6a65-4c5d-891f-2c14246c82f4", "address": "fa:16:3e:9b:ac:53", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738d894d-6a", "ovs_interfaceid": "738d894d-6a65-4c5d-891f-2c14246c82f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.750877] env[62816]: INFO nova.compute.manager [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 
5b87e09d-ae08-4936-8479-c845e25b31b4] Shelve offloading [ 1954.752493] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1954.752740] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a8e4108-2bb0-48c6-ac6a-a9660adcb016 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.762337] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1954.762337] env[62816]: value = "task-1789254" [ 1954.762337] env[62816]: _type = "Task" [ 1954.762337] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.769977] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789254, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.988550] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789253, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.169367] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.206854] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.207203] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Instance network_info: |[{"id": "738d894d-6a65-4c5d-891f-2c14246c82f4", "address": "fa:16:3e:9b:ac:53", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738d894d-6a", "ovs_interfaceid": "738d894d-6a65-4c5d-891f-2c14246c82f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1955.207739] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:ac:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '738d894d-6a65-4c5d-891f-2c14246c82f4', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1955.215661] env[62816]: DEBUG oslo.service.loopingcall [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1955.215874] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1955.216233] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9df85348-fdde-4964-9bc0-74ed7d7a29c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.236433] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1955.236433] env[62816]: value = "task-1789255" [ 1955.236433] env[62816]: _type = "Task" [ 1955.236433] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.243844] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789255, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.271552] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1955.271817] env[62816]: DEBUG nova.compute.manager [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1955.272578] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c30b252-7ee9-4c27-a365-795eb40601a2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.277762] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.277969] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.278186] env[62816]: DEBUG nova.network.neutron [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1955.485730] env[62816]: DEBUG nova.compute.manager [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Received event network-changed-738d894d-6a65-4c5d-891f-2c14246c82f4 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1955.485956] env[62816]: DEBUG nova.compute.manager [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Refreshing instance network info cache due to event network-changed-738d894d-6a65-4c5d-891f-2c14246c82f4. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1955.486178] env[62816]: DEBUG oslo_concurrency.lockutils [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] Acquiring lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.486321] env[62816]: DEBUG oslo_concurrency.lockutils [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] Acquired lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.486500] env[62816]: DEBUG nova.network.neutron [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Refreshing network info cache for port 738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1955.493290] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789253, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.240496} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.493567] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4/fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1955.493782] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1955.494512] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a0fd020-87b8-41b3-80fe-5c6fb6abb952 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.501235] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1955.501235] env[62816]: value = "task-1789256" [ 1955.501235] env[62816]: _type = "Task" [ 1955.501235] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.509095] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789256, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.747080] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789255, 'name': CreateVM_Task, 'duration_secs': 0.421768} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.747260] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1955.747861] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.748038] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.748356] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1955.748594] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c493f92-0335-4191-906d-8882800a4a33 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.753285] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1955.753285] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5257449a-1e2f-eb62-7cb5-b2fbef1c641c" [ 1955.753285] env[62816]: _type = "Task" [ 1955.753285] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.760856] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5257449a-1e2f-eb62-7cb5-b2fbef1c641c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.970533] env[62816]: DEBUG nova.network.neutron [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.012172] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063838} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.012172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1956.012969] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5ba989-7a62-4313-8689-67eb3dbd8815 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.036914] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4/fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1956.036914] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4055fb2f-ecac-4255-a0cb-30495b05dbd2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.056603] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1956.056603] env[62816]: value = "task-1789257" [ 1956.056603] env[62816]: _type = "Task" [ 1956.056603] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.064548] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789257, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.226769] env[62816]: DEBUG nova.network.neutron [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updated VIF entry in instance network info cache for port 738d894d-6a65-4c5d-891f-2c14246c82f4. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1956.227134] env[62816]: DEBUG nova.network.neutron [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updating instance_info_cache with network_info: [{"id": "738d894d-6a65-4c5d-891f-2c14246c82f4", "address": "fa:16:3e:9b:ac:53", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738d894d-6a", "ovs_interfaceid": "738d894d-6a65-4c5d-891f-2c14246c82f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.262878] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5257449a-1e2f-eb62-7cb5-b2fbef1c641c, 'name': SearchDatastore_Task, 'duration_secs': 0.009628} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.263220] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.263538] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1956.263806] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.263958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.264183] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1956.264446] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a68004f7-5f6c-4b77-a312-60a40495d271 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.275805] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1956.276012] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1956.276768] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c22f4c69-11c5-47d8-85ed-9c061f99c246 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.281901] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1956.281901] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521cfeea-2282-a053-2276-23dd57da19a2" [ 1956.281901] env[62816]: _type = "Task" [ 1956.281901] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.289799] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521cfeea-2282-a053-2276-23dd57da19a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.473830] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.566094] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789257, 'name': ReconfigVM_Task, 'duration_secs': 0.260561} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.566374] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Reconfigured VM instance instance-00000065 to attach disk [datastore1] fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4/fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1956.567281] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2273b10a-eb72-4b5f-ab02-cab5fa9124dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.573398] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1956.573398] env[62816]: value = "task-1789258" [ 1956.573398] env[62816]: _type = "Task" [ 1956.573398] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.581396] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789258, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.685059] env[62816]: DEBUG nova.compute.manager [req-64e8f7da-65eb-4ca6-bca2-04c70c3cd3c7 req-c290cdaf-2875-4a0e-b68c-b9115af7db96 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-vif-unplugged-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1956.685374] env[62816]: DEBUG oslo_concurrency.lockutils [req-64e8f7da-65eb-4ca6-bca2-04c70c3cd3c7 req-c290cdaf-2875-4a0e-b68c-b9115af7db96 service nova] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.685695] env[62816]: DEBUG oslo_concurrency.lockutils [req-64e8f7da-65eb-4ca6-bca2-04c70c3cd3c7 req-c290cdaf-2875-4a0e-b68c-b9115af7db96 service nova] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.685964] env[62816]: DEBUG oslo_concurrency.lockutils [req-64e8f7da-65eb-4ca6-bca2-04c70c3cd3c7 req-c290cdaf-2875-4a0e-b68c-b9115af7db96 service nova] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.686166] env[62816]: DEBUG nova.compute.manager [req-64e8f7da-65eb-4ca6-bca2-04c70c3cd3c7 req-c290cdaf-2875-4a0e-b68c-b9115af7db96 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] No waiting events found dispatching network-vif-unplugged-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1956.686341] env[62816]: WARNING nova.compute.manager [req-64e8f7da-65eb-4ca6-bca2-04c70c3cd3c7 req-c290cdaf-2875-4a0e-b68c-b9115af7db96 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received unexpected event network-vif-unplugged-850c89e0-1047-4847-b1c9-d9fd0435045e for instance with vm_state shelved and task_state shelving_offloading. 
[ 1956.688308] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7009f9db-cd07-43d0-924c-13acc5978c1c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.710826] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1956.730121] env[62816]: DEBUG oslo_concurrency.lockutils [req-a5799b8e-cdd1-474a-ab2d-ad5538af4316 req-02cb3ab6-5b90-4cdf-84fc-57610656e227 service nova] Releasing lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.774747] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1956.775644] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0657ce8-1737-4501-9a98-7b9dce5a0b4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.784951] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1956.787613] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-18f3f55d-f455-47ba-8dac-556459ca35aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.794209] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521cfeea-2282-a053-2276-23dd57da19a2, 'name': SearchDatastore_Task, 'duration_secs': 0.010949} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.794893] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d2f424d-2301-4e43-b0f3-bc90eefc0ef8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.799645] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1956.799645] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a000d2-81a9-4e89-fa87-bbc607fc1296" [ 1956.799645] env[62816]: _type = "Task" [ 1956.799645] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.806467] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a000d2-81a9-4e89-fa87-bbc607fc1296, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.913504] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1956.913802] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1956.913997] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleting the datastore file [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1956.914287] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2291a9af-349d-46ed-8eea-a00125382a89 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.920976] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1956.920976] env[62816]: value = "task-1789260" [ 1956.920976] env[62816]: _type = "Task" [ 1956.920976] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.928337] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789260, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.083508] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789258, 'name': Rename_Task, 'duration_secs': 0.150576} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.083857] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1957.084138] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92e8cdc5-c32d-4a76-85f8-e097f47bfe59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.091009] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1957.091009] env[62816]: value = "task-1789261" [ 1957.091009] env[62816]: _type = "Task" [ 1957.091009] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.098435] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789261, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.218067] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1957.218067] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1879e18-3dfb-488d-86c8-dddab8f87400 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.225334] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1957.225334] env[62816]: value = "task-1789262" [ 1957.225334] env[62816]: _type = "Task" [ 1957.225334] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.233460] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789262, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.310019] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a000d2-81a9-4e89-fa87-bbc607fc1296, 'name': SearchDatastore_Task, 'duration_secs': 0.010332} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.310329] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1957.310618] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 5e76d63c-b05c-4e8b-8b90-6110bd7d654c/5e76d63c-b05c-4e8b-8b90-6110bd7d654c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1957.310883] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c97b7515-a266-40aa-8807-895acfa83307 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.317466] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1957.317466] env[62816]: value = "task-1789263" [ 1957.317466] env[62816]: _type = "Task" [ 1957.317466] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.325194] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789263, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.430291] env[62816]: DEBUG oslo_vmware.api [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1761} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.430573] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1957.430799] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1957.431020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1957.457081] env[62816]: INFO nova.scheduler.client.report [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted allocations for instance 5b87e09d-ae08-4936-8479-c845e25b31b4 [ 1957.601031] env[62816]: DEBUG oslo_vmware.api [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789261, 'name': PowerOnVM_Task, 'duration_secs': 0.459089} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.601351] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1957.601564] env[62816]: INFO nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Took 9.31 seconds to spawn the instance on the hypervisor. [ 1957.601784] env[62816]: DEBUG nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1957.602597] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b383735d-42e4-44ef-b093-700e8ef139fb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.737240] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789262, 'name': PowerOffVM_Task, 'duration_secs': 0.208174} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.737518] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1957.737704] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1957.827444] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789263, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.962325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.962568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.962779] env[62816]: DEBUG nova.objects.instance [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'resources' on Instance uuid 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1958.118583] env[62816]: INFO nova.compute.manager [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Took 14.64 seconds to build instance. 
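
The interleaved spawn paths above (fc6ed02d-7bf2-... finishing its build in ~14.6 s, and 5e76d63c-... part-way through copying its cached image) both follow the same pattern: each stage is submitted as a vCenter task and then polled until it reports success, which is what the repeated wait_for_task / _poll_task lines record. A toy, self-contained model of that submit-and-poll pipeline; every name, duration and task id below is invented for illustration and no vSphere API is involved:

```python
# Toy model of the "submit a vCenter task, then poll until it succeeds" pattern
# visible in the log (CopyVirtualDisk_Task -> ExtendVirtualDisk_Task ->
# ReconfigVM_Task -> Rename_Task -> PowerOnVM_Task). Purely illustrative.
import itertools
import time

_task_ids = itertools.count(1789258)  # fake task-id counter, seeded from the log


class FakeTask:
    """A pretend vCenter task that finishes after a few polls."""

    def __init__(self, name, polls_to_finish=3):
        self.id = "task-%d" % next(_task_ids)
        self.name = name
        self._total = polls_to_finish
        self._polls_left = polls_to_finish

    def poll(self):
        """Return (state, progress) the way a real task read-back would."""
        self._polls_left -= 1
        if self._polls_left <= 0:
            return "success", 100
        done = self._total - self._polls_left
        return "running", int(100 * done / self._total)


def wait_for_task(task, poll_interval=0.1):
    """Block until the task completes, logging progress like _poll_task does."""
    while True:
        state, progress = task.poll()
        print("Task: {'id': %s, 'name': %s} progress is %d%%"
              % (task.id, task.name, progress))
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("%s failed" % task.name)
        time.sleep(poll_interval)


def spawn_from_cached_image(instance_uuid):
    """Run the same stage sequence the log shows, one polled task per stage."""
    stages = ["CopyVirtualDisk_Task", "ExtendVirtualDisk_Task",
              "ReconfigVM_Task", "Rename_Task", "PowerOnVM_Task"]
    start = time.time()
    for name in stages:
        wait_for_task(FakeTask(name))
    print("Took %.2f seconds to spawn the instance %s"
          % (time.time() - start, instance_uuid))


if __name__ == "__main__":
    spawn_from_cached_image("fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4")
```
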
[ 1958.244308] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1958.244568] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1958.244892] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1958.245211] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1958.245358] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1958.245461] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1958.245668] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1958.245836] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1958.246258] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:501}} [ 1958.246258] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1958.247054] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1958.251867] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cf2f37e-4a28-446a-8b60-941e894998b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.268026] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1958.268026] env[62816]: value = "task-1789264" [ 1958.268026] env[62816]: _type = "Task" [ 1958.268026] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.276169] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789264, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.327168] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789263, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.894934} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.327417] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 5e76d63c-b05c-4e8b-8b90-6110bd7d654c/5e76d63c-b05c-4e8b-8b90-6110bd7d654c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1958.327647] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1958.327905] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-694d104b-1362-453d-94e6-97b1ccb6b9e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.334619] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1958.334619] env[62816]: value = "task-1789265" [ 1958.334619] env[62816]: _type = "Task" [ 1958.334619] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.342068] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789265, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.465579] env[62816]: DEBUG nova.objects.instance [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'numa_topology' on Instance uuid 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1958.621144] env[62816]: DEBUG oslo_concurrency.lockutils [None req-716a96be-c42e-420f-9beb-7d233a68ab32 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.155s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.719334] env[62816]: DEBUG nova.compute.manager [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1958.719491] env[62816]: DEBUG nova.compute.manager [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing instance network info cache due to event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1958.719710] env[62816]: DEBUG oslo_concurrency.lockutils [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1958.719854] env[62816]: DEBUG oslo_concurrency.lockutils [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1958.720027] env[62816]: DEBUG nova.network.neutron [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1958.778302] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789264, 'name': ReconfigVM_Task, 'duration_secs': 0.369736} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.778673] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1958.843334] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789265, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06729} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.843599] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1958.844400] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e63b91-4be2-4ffe-b688-8309e406fe38 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.867338] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 5e76d63c-b05c-4e8b-8b90-6110bd7d654c/5e76d63c-b05c-4e8b-8b90-6110bd7d654c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1958.867569] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40281cc0-b2c0-467e-ab56-2ee67e74d3b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.886115] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1958.886115] env[62816]: value = "task-1789266" [ 1958.886115] env[62816]: _type = "Task" [ 1958.886115] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.893773] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789266, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.968195] env[62816]: DEBUG nova.objects.base [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Object Instance<5b87e09d-ae08-4936-8479-c845e25b31b4> lazy-loaded attributes: resources,numa_topology {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1959.042924] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c5e6b7-3882-46a6-96b6-85a76e26d1cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.050406] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114fa2f8-92b9-44ba-b023-1c6d0a1d0ed0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.079154] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d2f8fa-287d-4320-9d6d-c236ec8cd797 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.085817] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719f1a18-7f9e-408c-b390-8b12161d180e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.098598] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.098826] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.099038] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.099223] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.099389] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 
tempest-DeleteServersTestJSON-1959749629-project-member] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.100956] env[62816]: DEBUG nova.compute.provider_tree [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1959.102294] env[62816]: INFO nova.compute.manager [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Terminating instance [ 1959.103849] env[62816]: DEBUG nova.compute.manager [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1959.104060] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1959.104758] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b10864a-c2df-4ab4-bde8-8e5e891be33b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.111116] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1959.111326] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1de35b8-4ec0-46f6-a801-f757de9541df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.116892] env[62816]: DEBUG oslo_vmware.api [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1959.116892] env[62816]: value = "task-1789267" [ 1959.116892] env[62816]: _type = "Task" [ 1959.116892] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.123969] env[62816]: DEBUG oslo_vmware.api [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789267, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.285458] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1959.285717] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1959.285889] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1959.286085] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1959.286236] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1959.286391] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1959.286805] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1959.286973] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1959.287173] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1959.287347] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1959.287623] env[62816]: DEBUG nova.virt.hardware [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1959.293243] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1959.296063] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-720ddccb-eb37-4492-80e3-9f916545eb90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.315274] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1959.315274] env[62816]: value = "task-1789268" [ 1959.315274] env[62816]: _type = "Task" [ 1959.315274] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.325793] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789268, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.396377] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789266, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.607253] env[62816]: DEBUG nova.scheduler.client.report [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1959.631086] env[62816]: DEBUG oslo_vmware.api [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789267, 'name': PowerOffVM_Task, 'duration_secs': 0.22342} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.631440] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1959.631623] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1959.631944] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6429bc27-2bc2-468a-a526-226538c8be35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.736919] env[62816]: DEBUG nova.network.neutron [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updated VIF entry in instance network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1959.737416] env[62816]: DEBUG nova.network.neutron [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap850c89e0-10", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1959.827326] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1959.827561] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1959.827748] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleting the datastore file [datastore1] fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1959.828041] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789268, 'name': ReconfigVM_Task, 'duration_secs': 0.472334} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.828249] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bfd0082-51c8-4db3-a9d8-ff5c3e7e966b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.830072] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1959.830848] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e7bdd4-ebe8-4a24-b76b-f0d4c08d95f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.860702] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1959.862371] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-798e3e77-1e4e-4764-86ac-9f1f0bef1bb2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.875383] env[62816]: DEBUG oslo_vmware.api [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1959.875383] env[62816]: value = "task-1789270" [ 1959.875383] env[62816]: _type = "Task" [ 1959.875383] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.881600] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1959.881600] env[62816]: value = "task-1789271" [ 1959.881600] env[62816]: _type = "Task" [ 1959.881600] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.892840] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789271, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.897259] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789266, 'name': ReconfigVM_Task, 'duration_secs': 1.002564} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.897523] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 5e76d63c-b05c-4e8b-8b90-6110bd7d654c/5e76d63c-b05c-4e8b-8b90-6110bd7d654c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1959.898140] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18f86195-daff-4785-8ca8-466b040738d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.905992] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1959.905992] env[62816]: value = "task-1789272" [ 1959.905992] env[62816]: _type = "Task" [ 1959.905992] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.914830] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789272, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.115697] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.153s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.240439] env[62816]: DEBUG oslo_concurrency.lockutils [req-916337b6-086b-4fd0-977e-9e91e65aee66 req-1656ef9f-f001-406f-89d0-3b3148d801f5 service nova] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.385176] env[62816]: DEBUG oslo_vmware.api [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234282} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.388025] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1960.388221] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1960.388396] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1960.388568] env[62816]: INFO nova.compute.manager [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1960.388802] env[62816]: DEBUG oslo.service.loopingcall [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1960.388996] env[62816]: DEBUG nova.compute.manager [-] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1960.389104] env[62816]: DEBUG nova.network.neutron [-] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1960.395657] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789271, 'name': ReconfigVM_Task, 'duration_secs': 0.363445} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.395943] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1960.396239] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1960.415707] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789272, 'name': Rename_Task, 'duration_secs': 0.145303} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.415986] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1960.416241] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-caefc70a-835c-4c56-9000-f6160576e8c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.422506] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1960.422506] env[62816]: value = "task-1789273" [ 1960.422506] env[62816]: _type = "Task" [ 1960.422506] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.430477] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789273, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.625289] env[62816]: DEBUG oslo_concurrency.lockutils [None req-912c08b6-445b-4af8-a533-ff28e55b6446 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 21.240s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.693315] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.693665] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.693831] env[62816]: INFO nova.compute.manager [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Unshelving [ 1960.745393] env[62816]: DEBUG nova.compute.manager [req-1087a897-ad76-4115-80b3-dd8aa2fba433 req-e750fb18-b9fc-4f37-bb9e-a769528e7008 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Received event network-vif-deleted-4af57358-64d9-46b0-9d94-dad516307565 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1960.745580] env[62816]: INFO nova.compute.manager [req-1087a897-ad76-4115-80b3-dd8aa2fba433 req-e750fb18-b9fc-4f37-bb9e-a769528e7008 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Neutron deleted interface 4af57358-64d9-46b0-9d94-dad516307565; detaching it from the instance and deleting it from the info cache [ 1960.745756] env[62816]: DEBUG nova.network.neutron [req-1087a897-ad76-4115-80b3-dd8aa2fba433 req-e750fb18-b9fc-4f37-bb9e-a769528e7008 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.903028] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a94c46-23dd-4882-9ec4-c7e31ad47a84 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.927410] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fd1083-6551-40d2-93f0-4ca659df5050 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.952083] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': 
task-1789273, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.952415] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1961.113080] env[62816]: DEBUG nova.network.neutron [-] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.249267] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b64aa173-e4e3-4142-bc50-ec7b63632c76 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.259066] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc539f39-547e-4348-9ab1-19d1c950eb19 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.285986] env[62816]: DEBUG nova.compute.manager [req-1087a897-ad76-4115-80b3-dd8aa2fba433 req-e750fb18-b9fc-4f37-bb9e-a769528e7008 service nova] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Detach interface failed, port_id=4af57358-64d9-46b0-9d94-dad516307565, reason: Instance fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1961.433420] env[62816]: DEBUG oslo_vmware.api [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789273, 'name': PowerOnVM_Task, 'duration_secs': 0.577682} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.433668] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1961.433872] env[62816]: INFO nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Took 8.18 seconds to spawn the instance on the hypervisor. 
[ 1961.434073] env[62816]: DEBUG nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1961.434847] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acfa4161-429f-4fd8-84ff-9e10f993acc3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.616241] env[62816]: INFO nova.compute.manager [-] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Took 1.23 seconds to deallocate network for instance. [ 1961.718881] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.719148] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.719357] env[62816]: DEBUG nova.objects.instance [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'pci_requests' on Instance uuid 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.955987] env[62816]: INFO nova.compute.manager [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Took 14.82 seconds to build instance. 
[ 1962.123725] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.223411] env[62816]: DEBUG nova.objects.instance [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'numa_topology' on Instance uuid 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.456227] env[62816]: DEBUG nova.compute.manager [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Received event network-changed-738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1962.456227] env[62816]: DEBUG nova.compute.manager [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Refreshing instance network info cache due to event network-changed-738d894d-6a65-4c5d-891f-2c14246c82f4. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1962.456227] env[62816]: DEBUG oslo_concurrency.lockutils [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] Acquiring lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1962.456227] env[62816]: DEBUG oslo_concurrency.lockutils [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] Acquired lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.456422] env[62816]: DEBUG nova.network.neutron [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Refreshing network info cache for port 738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1962.457714] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2a5fae47-84d8-48ce-8343-3c3a0f1bac27 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.330s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.681125] env[62816]: DEBUG nova.network.neutron [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Port 7731c29e-449a-4c40-bb70-5a2c88561abe binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1962.726243] env[62816]: INFO nova.compute.claims [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb 
tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1963.164398] env[62816]: DEBUG nova.network.neutron [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updated VIF entry in instance network info cache for port 738d894d-6a65-4c5d-891f-2c14246c82f4. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1963.164983] env[62816]: DEBUG nova.network.neutron [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updating instance_info_cache with network_info: [{"id": "738d894d-6a65-4c5d-891f-2c14246c82f4", "address": "fa:16:3e:9b:ac:53", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap738d894d-6a", "ovs_interfaceid": "738d894d-6a65-4c5d-891f-2c14246c82f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.667760] env[62816]: DEBUG oslo_concurrency.lockutils [req-a77bde6c-7a46-41cc-b8f8-0ea2f5be73a2 req-0f8e68a3-2dba-49ea-99a5-f33fa48f9e3f service nova] Releasing lock "refresh_cache-5e76d63c-b05c-4e8b-8b90-6110bd7d654c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1963.699829] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.700247] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.700483] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.829819] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c9ac83-fbd6-4bd7-92d0-e41db4209990 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.837325] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a31ef3b-bb74-48a6-aa1e-1feee14cd06a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.867721] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ec3afb0-057f-44e4-bdc5-d4b6827f00e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.875054] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13ced0f-dd67-4d91-8453-2f7ad2b0e763 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.889799] env[62816]: DEBUG nova.compute.provider_tree [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1964.393077] env[62816]: DEBUG nova.scheduler.client.report [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1964.756200] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1964.756410] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1964.756583] env[62816]: DEBUG nova.network.neutron [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 
341bf195-e528-4e3b-8636-fac7a383d228] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1964.897410] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.178s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.899876] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.776s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.900153] env[62816]: DEBUG nova.objects.instance [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'resources' on Instance uuid fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.928553] env[62816]: INFO nova.network.neutron [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating port 850c89e0-1047-4847-b1c9-d9fd0435045e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1965.458298] env[62816]: DEBUG nova.network.neutron [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1965.524466] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5832d01d-fc54-41ae-9b3c-96bb47a5d63d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.532069] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c51c43e-714e-4681-8398-a78b6525623e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.563775] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fbff6b-faba-4b40-82ac-ae151971c380 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.571283] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdf194c-0a1e-4288-b309-29598a6a5723 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.584799] env[62816]: DEBUG nova.compute.provider_tree [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1965.962326] env[62816]: DEBUG oslo_concurrency.lockutils [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1966.089914] env[62816]: DEBUG nova.scheduler.client.report [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1966.415343] env[62816]: DEBUG nova.compute.manager [req-21c6c477-4e7c-4d69-912f-b2bab35e85a2 req-aed4a398-3dbe-4d02-ae11-7297b4d5dad1 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-vif-plugged-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1966.415576] env[62816]: DEBUG oslo_concurrency.lockutils [req-21c6c477-4e7c-4d69-912f-b2bab35e85a2 req-aed4a398-3dbe-4d02-ae11-7297b4d5dad1 service nova] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.415788] env[62816]: DEBUG oslo_concurrency.lockutils [req-21c6c477-4e7c-4d69-912f-b2bab35e85a2 req-aed4a398-3dbe-4d02-ae11-7297b4d5dad1 service nova] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 
0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.415958] env[62816]: DEBUG oslo_concurrency.lockutils [req-21c6c477-4e7c-4d69-912f-b2bab35e85a2 req-aed4a398-3dbe-4d02-ae11-7297b4d5dad1 service nova] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.416247] env[62816]: DEBUG nova.compute.manager [req-21c6c477-4e7c-4d69-912f-b2bab35e85a2 req-aed4a398-3dbe-4d02-ae11-7297b4d5dad1 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] No waiting events found dispatching network-vif-plugged-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1966.416431] env[62816]: WARNING nova.compute.manager [req-21c6c477-4e7c-4d69-912f-b2bab35e85a2 req-aed4a398-3dbe-4d02-ae11-7297b4d5dad1 service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received unexpected event network-vif-plugged-850c89e0-1047-4847-b1c9-d9fd0435045e for instance with vm_state shelved_offloaded and task_state spawning. [ 1966.472285] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8173400-79fb-478d-9392-56de65e107ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.478594] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a546459b-89cd-4dc0-89d2-b097c9210a3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.507468] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.507848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.508050] env[62816]: DEBUG nova.network.neutron [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1966.595055] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.695s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.612385] env[62816]: INFO nova.scheduler.client.report [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 
tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocations for instance fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4 [ 1967.118866] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1b8293e7-7008-47aa-9c63-7475114db655 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.020s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.216321] env[62816]: DEBUG nova.network.neutron [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.579770] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d66b40-d07d-4718-a7ac-f1eae423fb9b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.603796] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ad3778-5085-4a2d-9930-40d7049d9ccd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.610944] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1967.719823] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1967.743129] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='3e530134a01684f075ad6cde0ca8e534',container_format='bare',created_at=2024-12-12T02:59:55Z,direct_url=,disk_format='vmdk',id=6576f905-3e87-412b-9611-f955fc53c2e8,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-525805156-shelved',owner='138797faa4144ecbad6956e126963199',properties=ImageMetaProps,protected=,size=31668736,status='active',tags=,updated_at=2024-12-12T03:00:09Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1967.743393] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1967.743554] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1967.743739] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1967.743912] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1967.744091] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1967.744318] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1967.744478] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1967.744645] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1967.744807] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1967.745029] env[62816]: DEBUG nova.virt.hardware [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1967.745955] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5b03e0-7527-456a-8bd8-e1f143f5539a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.753614] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ea761a-54c7-4f59-b7ab-047b8dc6b0ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.767057] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:e2:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9b7e9e55-3210-4fae-9648-d87e76c3d931', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '850c89e0-1047-4847-b1c9-d9fd0435045e', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1967.774677] env[62816]: DEBUG oslo.service.loopingcall [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1967.774959] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1967.775192] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0fabed3-5b33-47ff-8edd-a178fc1c621b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.794838] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1967.794838] env[62816]: value = "task-1789276" [ 1967.794838] env[62816]: _type = "Task" [ 1967.794838] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.802916] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789276, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.117457] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1968.117766] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1fb701d6-eb02-4afe-a42c-fbe820ea7801 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.125113] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1968.125113] env[62816]: value = "task-1789277" [ 1968.125113] env[62816]: _type = "Task" [ 1968.125113] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.132867] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789277, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.239640] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "35852805-5776-4b65-96aa-4365b32c66d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.239897] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.306206] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789276, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.333248] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.333524] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.333745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1968.333980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1968.334210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1968.336468] env[62816]: INFO nova.compute.manager [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Terminating instance [ 1968.338407] env[62816]: DEBUG nova.compute.manager [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1968.338617] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1968.339470] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329ad2e8-a19a-45a1-81c1-05949f0e47d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.346972] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1968.347268] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fededcb1-745e-4013-8dcf-ec127bb6e7d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.353833] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1968.353833] env[62816]: value = "task-1789278" [ 1968.353833] env[62816]: _type = "Task" [ 1968.353833] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.364589] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.443451] env[62816]: DEBUG nova.compute.manager [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1968.443659] env[62816]: DEBUG nova.compute.manager [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing instance network info cache due to event network-changed-850c89e0-1047-4847-b1c9-d9fd0435045e. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1968.443936] env[62816]: DEBUG oslo_concurrency.lockutils [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] Acquiring lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.444050] env[62816]: DEBUG oslo_concurrency.lockutils [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] Acquired lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.444241] env[62816]: DEBUG nova.network.neutron [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Refreshing network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1968.635804] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789277, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.742779] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1968.805900] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789276, 'name': CreateVM_Task, 'duration_secs': 0.619887} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.806177] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1968.814631] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1968.814829] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1968.815337] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1968.815628] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-023e63d5-6618-46e6-9557-a8dac4aabe25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.821222] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1968.821222] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d1517a-abb2-a3b7-7ec8-af0a51660b5b" [ 1968.821222] env[62816]: _type = "Task" [ 1968.821222] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.829996] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d1517a-abb2-a3b7-7ec8-af0a51660b5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.863063] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789278, 'name': PowerOffVM_Task, 'duration_secs': 0.203217} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.863344] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1968.863514] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1968.863762] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5cb2054-40af-42ad-a429-86ce42aef619 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.935170] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1968.935428] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1968.935613] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Deleting the datastore file [datastore1] cb0a9fc4-6809-4ce9-9521-eb1a115493cf {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1968.935898] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-883d258d-b3d7-40ec-97d4-79f6e7709124 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.942481] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for the task: (returnval){ [ 1968.942481] env[62816]: value = "task-1789281" [ 1968.942481] env[62816]: _type = "Task" [ 1968.942481] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.951433] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789281, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.135930] env[62816]: DEBUG oslo_vmware.api [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789277, 'name': PowerOnVM_Task, 'duration_secs': 0.644063} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.136325] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1969.136422] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-5e0c86f0-f77a-454f-9f00-beadb11be953 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance '341bf195-e528-4e3b-8636-fac7a383d228' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1969.209307] env[62816]: DEBUG nova.network.neutron [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updated VIF entry in instance network info cache for port 850c89e0-1047-4847-b1c9-d9fd0435045e. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1969.209692] env[62816]: DEBUG nova.network.neutron [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [{"id": "850c89e0-1047-4847-b1c9-d9fd0435045e", "address": "fa:16:3e:81:e2:cf", "network": {"id": "03d96b43-e138-41ab-973c-10df193a51ed", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-682332702-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "138797faa4144ecbad6956e126963199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9b7e9e55-3210-4fae-9648-d87e76c3d931", "external-id": "nsx-vlan-transportzone-967", "segmentation_id": 967, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap850c89e0-10", "ovs_interfaceid": "850c89e0-1047-4847-b1c9-d9fd0435045e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1969.264911] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.265233] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.266759] env[62816]: INFO nova.compute.claims [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1969.331693] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.332152] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Processing image 6576f905-3e87-412b-9611-f955fc53c2e8 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1969.332422] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.332422] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.332574] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1969.332841] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fde74707-c37b-4a3a-9719-ac71900ce1bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.348731] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1969.348918] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1969.349641] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0896a89-988f-4498-828e-ee7b4d6c5499 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.354709] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1969.354709] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e3f8cd-2ac8-801f-9d9c-1302a2706740" [ 1969.354709] env[62816]: _type = "Task" [ 1969.354709] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.362205] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e3f8cd-2ac8-801f-9d9c-1302a2706740, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.454140] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789281, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.712599] env[62816]: DEBUG oslo_concurrency.lockutils [req-9dffd891-40f7-489b-9619-6af70d194499 req-128d81ee-b7a4-438d-b89a-9668e1f702cf service nova] Releasing lock "refresh_cache-5b87e09d-ae08-4936-8479-c845e25b31b4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.865436] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Preparing fetch location {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1969.865694] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Fetch image to [datastore1] OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506/OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506.vmdk {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1969.865881] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Downloading stream optimized image 6576f905-3e87-412b-9611-f955fc53c2e8 to [datastore1] OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506/OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506.vmdk on the data store datastore1 as vApp {{(pid=62816) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1969.866073] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Downloading image file data 6576f905-3e87-412b-9611-f955fc53c2e8 to the ESX as VM named 'OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506' {{(pid=62816) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1969.936502] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1969.936502] env[62816]: value = "resgroup-9" [ 1969.936502] env[62816]: _type = "ResourcePool" [ 1969.936502] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1969.936795] env[62816]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2176a89f-bfee-4485-bddf-c2e098529f36 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.959594] env[62816]: DEBUG oslo_vmware.api [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Task: {'id': task-1789281, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.58112} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.960746] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1969.960937] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1969.961129] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1969.961306] env[62816]: INFO nova.compute.manager [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1969.961549] env[62816]: DEBUG oslo.service.loopingcall [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.961781] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease: (returnval){ [ 1969.961781] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c28b04-b711-759c-1155-ee8a2c66832b" [ 1969.961781] env[62816]: _type = "HttpNfcLease" [ 1969.961781] env[62816]: } obtained for vApp import into resource pool (val){ [ 1969.961781] env[62816]: value = "resgroup-9" [ 1969.961781] env[62816]: _type = "ResourcePool" [ 1969.961781] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1969.962017] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the lease: (returnval){ [ 1969.962017] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c28b04-b711-759c-1155-ee8a2c66832b" [ 1969.962017] env[62816]: _type = "HttpNfcLease" [ 1969.962017] env[62816]: } to be ready. 
{{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1969.962169] env[62816]: DEBUG nova.compute.manager [-] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1969.962266] env[62816]: DEBUG nova.network.neutron [-] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1969.969894] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1969.969894] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c28b04-b711-759c-1155-ee8a2c66832b" [ 1969.969894] env[62816]: _type = "HttpNfcLease" [ 1969.969894] env[62816]: } is initializing. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1970.378542] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3caffdb6-dc9f-4a6a-8fb7-054f4904871b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.386172] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1baaef1-5e6b-423b-962d-c41bcf40baec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.415695] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458943fb-52cd-4e66-a1f6-9671c8612f04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.423842] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d8f27c-b152-4434-b51a-0a45f8043ce1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.437888] env[62816]: DEBUG nova.compute.provider_tree [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1970.473024] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1970.473024] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c28b04-b711-759c-1155-ee8a2c66832b" [ 1970.473024] env[62816]: _type = "HttpNfcLease" [ 1970.473024] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1970.475715] env[62816]: DEBUG nova.compute.manager [req-639aaa6e-0cd0-4a2e-b7c1-cd803876e91f req-687141f8-8ed0-4a5e-ab54-cd90e999e6b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Received event network-vif-deleted-8f334780-4169-45ed-aac2-cf2be2b0b27a {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1970.475964] env[62816]: INFO nova.compute.manager [req-639aaa6e-0cd0-4a2e-b7c1-cd803876e91f req-687141f8-8ed0-4a5e-ab54-cd90e999e6b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Neutron deleted interface 8f334780-4169-45ed-aac2-cf2be2b0b27a; detaching it from the instance and deleting it from the info cache [ 1970.476106] env[62816]: DEBUG nova.network.neutron [req-639aaa6e-0cd0-4a2e-b7c1-cd803876e91f req-687141f8-8ed0-4a5e-ab54-cd90e999e6b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.941850] env[62816]: DEBUG nova.scheduler.client.report [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1970.945241] env[62816]: DEBUG nova.network.neutron [-] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.972867] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1970.972867] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c28b04-b711-759c-1155-ee8a2c66832b" [ 1970.972867] env[62816]: _type = "HttpNfcLease" [ 1970.972867] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1970.973185] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1970.973185] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c28b04-b711-759c-1155-ee8a2c66832b" [ 1970.973185] env[62816]: _type = "HttpNfcLease" [ 1970.973185] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1970.973886] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55550bd3-17b5-4bf2-971e-e197e3dfacbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.981184] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ac0e2c6-bfce-4a5c-8872-d9c5efc368ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.982799] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d083d-e87a-fd02-819c-845c6b83ac42/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1970.982976] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating HTTP connection to write to file with size = 31668736 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d083d-e87a-fd02-819c-845c6b83ac42/disk-0.vmdk. {{(pid=62816) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1971.043780] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00678b4f-77ff-4539-b00b-d69f4c11ba55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.059466] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9b4424e0-363e-4156-9e82-6ebf436bae96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.072645] env[62816]: DEBUG nova.compute.manager [req-639aaa6e-0cd0-4a2e-b7c1-cd803876e91f req-687141f8-8ed0-4a5e-ab54-cd90e999e6b3 service nova] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Detach interface failed, port_id=8f334780-4169-45ed-aac2-cf2be2b0b27a, reason: Instance cb0a9fc4-6809-4ce9-9521-eb1a115493cf could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1971.447754] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.182s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.448331] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1971.451151] env[62816]: INFO nova.compute.manager [-] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Took 1.49 seconds to deallocate network for instance. [ 1971.537676] env[62816]: DEBUG nova.network.neutron [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Port 7731c29e-449a-4c40-bb70-5a2c88561abe binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1971.538094] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1971.538335] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1971.538542] env[62816]: DEBUG nova.network.neutron [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1971.954165] env[62816]: DEBUG nova.compute.utils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1971.957305] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1971.957511] env[62816]: DEBUG nova.network.neutron [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1971.960652] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.960943] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.961215] env[62816]: DEBUG nova.objects.instance [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lazy-loading 'resources' on Instance uuid cb0a9fc4-6809-4ce9-9521-eb1a115493cf {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1972.004915] env[62816]: DEBUG nova.policy [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c5023a18d243b7aafb6d6181f931d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e070d1729247ff83b4ff6997b45385', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1972.169291] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Completed reading data from the image iterator. {{(pid=62816) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1972.169598] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d083d-e87a-fd02-819c-845c6b83ac42/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1972.170595] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d6a348-7229-4b90-ae2a-e448319b4b20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.177662] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d083d-e87a-fd02-819c-845c6b83ac42/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1972.177866] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d083d-e87a-fd02-819c-845c6b83ac42/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1972.178062] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-0749c31c-afc1-4198-8189-bbb33b0020c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.256651] env[62816]: DEBUG nova.network.neutron [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Successfully created port: 942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1972.280132] env[62816]: DEBUG nova.network.neutron [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1972.361316] env[62816]: DEBUG oslo_vmware.rw_handles [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d083d-e87a-fd02-819c-845c6b83ac42/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1972.361667] env[62816]: INFO nova.virt.vmwareapi.images [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Downloaded image file data 6576f905-3e87-412b-9611-f955fc53c2e8 [ 1972.362443] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b444a0d7-b380-413a-b7c6-06edadf82130 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.377660] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a98f210-fdfd-491e-8627-358109237366 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.406690] env[62816]: INFO nova.virt.vmwareapi.images [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] The imported VM was unregistered [ 1972.409136] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Caching image {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1972.409369] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Creating directory with path [datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1972.409630] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8adde086-f93e-49e8-b05e-b43758adfdd1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.436972] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Created directory with path [datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1972.437202] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506/OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506.vmdk to [datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk. 
{{(pid=62816) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1972.437458] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-5b0b5299-c158-465d-8789-709289c5b8ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.443732] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1972.443732] env[62816]: value = "task-1789285" [ 1972.443732] env[62816]: _type = "Task" [ 1972.443732] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1972.452238] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1972.461021] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1972.562178] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a19c3700-d8e6-4b02-8ef1-5f40a5a6650d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.569549] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84cf5417-95e6-4c58-a16c-59c58ac49b65 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.601012] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ff0daa-2acd-4008-957e-f3b7d6046a15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.607755] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060aaf41-3fba-4643-bf43-af5384379b1e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.620419] env[62816]: DEBUG nova.compute.provider_tree [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1972.783368] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1972.955276] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb 
tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.123750] env[62816]: DEBUG nova.scheduler.client.report [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1973.286897] env[62816]: DEBUG nova.compute.manager [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=62816) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1973.455055] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1973.470065] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1973.497680] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1973.497952] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1973.498169] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1973.498397] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1973.498552] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1973.498705] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1973.498926] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1973.499108] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1973.499291] env[62816]: DEBUG nova.virt.hardware [None 
req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1973.499461] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1973.499640] env[62816]: DEBUG nova.virt.hardware [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1973.500586] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb6ec50-37bb-47dc-92c7-eec4bdfd06f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.509305] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9573b48f-9db3-40d6-9c2d-9531a5920e20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1973.629679] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.651914] env[62816]: INFO nova.scheduler.client.report [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Deleted allocations for instance cb0a9fc4-6809-4ce9-9521-eb1a115493cf [ 1973.703079] env[62816]: DEBUG nova.compute.manager [req-5f39db24-bd8b-4edc-947b-a3e630986f35 req-fcc7992c-b05a-43d3-a4d3-86e5a8cd4a41 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Received event network-vif-plugged-942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1973.703079] env[62816]: DEBUG oslo_concurrency.lockutils [req-5f39db24-bd8b-4edc-947b-a3e630986f35 req-fcc7992c-b05a-43d3-a4d3-86e5a8cd4a41 service nova] Acquiring lock "35852805-5776-4b65-96aa-4365b32c66d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.704121] env[62816]: DEBUG oslo_concurrency.lockutils [req-5f39db24-bd8b-4edc-947b-a3e630986f35 req-fcc7992c-b05a-43d3-a4d3-86e5a8cd4a41 service nova] Lock "35852805-5776-4b65-96aa-4365b32c66d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.704403] env[62816]: DEBUG oslo_concurrency.lockutils [req-5f39db24-bd8b-4edc-947b-a3e630986f35 req-fcc7992c-b05a-43d3-a4d3-86e5a8cd4a41 service nova] Lock 
"35852805-5776-4b65-96aa-4365b32c66d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.704631] env[62816]: DEBUG nova.compute.manager [req-5f39db24-bd8b-4edc-947b-a3e630986f35 req-fcc7992c-b05a-43d3-a4d3-86e5a8cd4a41 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] No waiting events found dispatching network-vif-plugged-942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1973.704854] env[62816]: WARNING nova.compute.manager [req-5f39db24-bd8b-4edc-947b-a3e630986f35 req-fcc7992c-b05a-43d3-a4d3-86e5a8cd4a41 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Received unexpected event network-vif-plugged-942da178-a0ce-4757-9a96-359dd73d7aff for instance with vm_state building and task_state spawning. [ 1973.848631] env[62816]: DEBUG nova.network.neutron [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Successfully updated port: 942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1973.957628] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.162125] env[62816]: DEBUG oslo_concurrency.lockutils [None req-757eb942-e33c-4f87-a061-7242568d5b0e tempest-ServersTestManualDisk-865634038 tempest-ServersTestManualDisk-865634038-project-member] Lock "cb0a9fc4-6809-4ce9-9521-eb1a115493cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.828s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.351817] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-35852805-5776-4b65-96aa-4365b32c66d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.352093] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-35852805-5776-4b65-96aa-4365b32c66d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.352404] env[62816]: DEBUG nova.network.neutron [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1974.399079] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.399350] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.457648] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.904434] env[62816]: DEBUG nova.objects.instance [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'migration_context' on Instance uuid 341bf195-e528-4e3b-8636-fac7a383d228 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1974.904519] env[62816]: DEBUG nova.network.neutron [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1974.957161] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.153187] env[62816]: DEBUG nova.network.neutron [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Updating instance_info_cache with network_info: [{"id": "942da178-a0ce-4757-9a96-359dd73d7aff", "address": "fa:16:3e:43:4f:3d", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942da178-a0", "ovs_interfaceid": "942da178-a0ce-4757-9a96-359dd73d7aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.458888] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 49%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.509674] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd82819a-f579-44e9-94d1-e97c390a15b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.518981] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091eb869-eaf6-4437-8cc8-5c516c004cb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.554471] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a305885-082d-4eb6-9805-145454dfc1e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.562894] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f308f3c-de7f-47fa-ae98-1f7425e3787e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.576587] env[62816]: DEBUG nova.compute.provider_tree [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1975.655768] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-35852805-5776-4b65-96aa-4365b32c66d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.656188] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Instance network_info: |[{"id": "942da178-a0ce-4757-9a96-359dd73d7aff", "address": "fa:16:3e:43:4f:3d", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942da178-a0", "ovs_interfaceid": "942da178-a0ce-4757-9a96-359dd73d7aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1975.656646] env[62816]: DEBUG nova.virt.vmwareapi.vmops 
[None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:4f:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '942da178-a0ce-4757-9a96-359dd73d7aff', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1975.664949] env[62816]: DEBUG oslo.service.loopingcall [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1975.665308] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1975.665558] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-32b380e6-6f92-4b3e-b129-8d5c8668457e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.687566] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1975.687566] env[62816]: value = "task-1789287" [ 1975.687566] env[62816]: _type = "Task" [ 1975.687566] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.697255] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789287, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.816062] env[62816]: DEBUG nova.compute.manager [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Received event network-changed-942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1975.816856] env[62816]: DEBUG nova.compute.manager [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Refreshing instance network info cache due to event network-changed-942da178-a0ce-4757-9a96-359dd73d7aff. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1975.816856] env[62816]: DEBUG oslo_concurrency.lockutils [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] Acquiring lock "refresh_cache-35852805-5776-4b65-96aa-4365b32c66d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.816856] env[62816]: DEBUG oslo_concurrency.lockutils [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] Acquired lock "refresh_cache-35852805-5776-4b65-96aa-4365b32c66d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.816856] env[62816]: DEBUG nova.network.neutron [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Refreshing network info cache for port 942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1975.959854] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.080445] env[62816]: DEBUG nova.scheduler.client.report [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1976.199546] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789287, 'name': CreateVM_Task, 'duration_secs': 0.482086} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.199725] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1976.200467] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.200643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.200997] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1976.201296] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-249731b1-6860-4a75-96d3-17421e57b614 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.208056] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1976.208056] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cfa54a-11b0-5747-9d21-3031e65a1f0a" [ 1976.208056] env[62816]: _type = "Task" [ 1976.208056] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.216961] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cfa54a-11b0-5747-9d21-3031e65a1f0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.461034] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.595158] env[62816]: DEBUG nova.network.neutron [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Updated VIF entry in instance network info cache for port 942da178-a0ce-4757-9a96-359dd73d7aff. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1976.595158] env[62816]: DEBUG nova.network.neutron [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Updating instance_info_cache with network_info: [{"id": "942da178-a0ce-4757-9a96-359dd73d7aff", "address": "fa:16:3e:43:4f:3d", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap942da178-a0", "ovs_interfaceid": "942da178-a0ce-4757-9a96-359dd73d7aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1976.719976] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cfa54a-11b0-5747-9d21-3031e65a1f0a, 'name': SearchDatastore_Task, 'duration_secs': 0.146516} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.722322] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.722584] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1976.722861] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1976.723029] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1976.723221] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1976.723513] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0e13c3e-ff93-4859-a8e9-58231a9f29eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.738716] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1976.738976] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1976.739816] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a1eeec1-40b8-4690-9f19-ed8dabbfbfdb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.749050] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1976.749050] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523ee05a-07bd-1d9e-3c70-98ff453e2e7f" [ 1976.749050] env[62816]: _type = "Task" [ 1976.749050] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.758385] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523ee05a-07bd-1d9e-3c70-98ff453e2e7f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.961735] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.095830] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.696s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.105460] env[62816]: DEBUG oslo_concurrency.lockutils [req-3ebbc633-fb1a-406b-8109-ffddde4d1cd8 req-af164a2e-89bf-446e-b26c-0265d2a834b0 service nova] Releasing lock "refresh_cache-35852805-5776-4b65-96aa-4365b32c66d5" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.261224] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523ee05a-07bd-1d9e-3c70-98ff453e2e7f, 'name': SearchDatastore_Task, 'duration_secs': 0.146619} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.262261] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb661104-b9d8-4b40-b2f9-f93337d0413f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.269363] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1977.269363] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52110472-b611-3964-524e-c6904303f320" [ 1977.269363] env[62816]: _type = "Task" [ 1977.269363] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.279442] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52110472-b611-3964-524e-c6904303f320, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.461620] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.780938] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52110472-b611-3964-524e-c6904303f320, 'name': SearchDatastore_Task, 'duration_secs': 0.085919} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.781229] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1977.781480] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 35852805-5776-4b65-96aa-4365b32c66d5/35852805-5776-4b65-96aa-4365b32c66d5.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1977.781748] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b283c0b4-67c4-410d-a987-3fd5c2a385e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.790289] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1977.790289] env[62816]: value = "task-1789288" [ 1977.790289] env[62816]: _type = "Task" [ 1977.790289] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.800561] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789288, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.962080] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789285, 'name': MoveVirtualDisk_Task, 'duration_secs': 5.10134} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.962367] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506/OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506.vmdk to [datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk. 
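The SearchDatastore_Task and CopyVirtualDisk_Task records above all follow the same oslo.vmware pattern: the driver invokes a vSphere *_Task method through the API session, gets back a task reference, and polls it until completion, which is what the repeated "_poll_task ... progress is N%" lines record. A minimal sketch of that pattern follows; it is illustrative only: the connection details, datastore paths, and the datacenter moref value are placeholders rather than values from this log, and the VMwareAPISession argument order is assumed from typical usage.

    # Illustrative sketch of the invoke_api / wait_for_task pattern seen above;
    # all concrete values here are placeholders, not taken from this log.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.org', 'user', 'password',  # placeholder vCenter credentials
        10,    # api_retry_count (assumed positional order)
        0.5)   # task_poll_interval: seconds between the "progress is N%" polls

    vim = session.vim
    dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref
    source_vmdk = '[datastore1] devstack-image-cache_base/IMAGE_ID/IMAGE_ID.vmdk'
    dest_vmdk = '[datastore1] INSTANCE_UUID/INSTANCE_UUID.vmdk'

    # CopyVirtualDisk_Task is a VirtualDiskManager method: invoke_api issues the
    # SOAP call and returns a task moref; wait_for_task polls that task until it
    # succeeds or raises, producing progress updates like the ones logged above.
    task = session.invoke_api(
        vim, 'CopyVirtualDisk_Task',
        vim.service_content.virtualDiskManager,
        sourceName=source_vmdk, sourceDatacenter=dc_ref,
        destName=dest_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(task)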
[ 1977.962559] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Cleaning up location [datastore1] OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1977.962724] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c81ac1e9-26ea-40d6-9443-4ddeb7123506 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1977.962980] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0fb50d0e-7767-4033-b369-6d19fa77dbfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.970055] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1977.970055] env[62816]: value = "task-1789289" [ 1977.970055] env[62816]: _type = "Task" [ 1977.970055] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.981535] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789289, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.308055] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789288, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.481909] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.052875} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.482346] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1978.482529] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1978.482784] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk to [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1978.484011] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d25f440c-ebec-416c-a288-344c893674e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.491913] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1978.491913] env[62816]: value = "task-1789290" [ 1978.491913] env[62816]: _type = "Task" [ 1978.491913] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.501664] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.641677] env[62816]: INFO nova.compute.manager [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Swapping old allocation on dict_keys(['27f49c85-1bb9-4d17-a914-e2f45a5e84fa']) held by migration 090d0a64-f039-4520-a304-542d76707944 for instance [ 1978.670138] env[62816]: DEBUG nova.scheduler.client.report [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Overwriting current allocation {'allocations': {'27f49c85-1bb9-4d17-a914-e2f45a5e84fa': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 141}}, 'project_id': '8c54ea5a5abf4f0298b76f6081de8e60', 'user_id': '6a59c608ab954a3ba9cd61a84f30b89f', 'consumer_generation': 1} on consumer 341bf195-e528-4e3b-8636-fac7a383d228 {{(pid=62816) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1978.769258] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1978.769258] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.769258] env[62816]: DEBUG nova.network.neutron [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1978.803305] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789288, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.588548} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.803805] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 35852805-5776-4b65-96aa-4365b32c66d5/35852805-5776-4b65-96aa-4365b32c66d5.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1978.804067] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1978.804335] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3ab989c4-fdfb-4d65-91a5-5568e4429a99 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.812805] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1978.812805] env[62816]: value = "task-1789292" [ 1978.812805] env[62816]: _type = "Task" [ 1978.812805] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.824775] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789292, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.005845] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.323957] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789292, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09525} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.324300] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1979.325170] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228c576f-1da4-4ec2-959d-630b81292c43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.348992] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 35852805-5776-4b65-96aa-4365b32c66d5/35852805-5776-4b65-96aa-4365b32c66d5.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1979.349405] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f53d995d-317b-49e3-8906-a0198f230fb8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.371955] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1979.371955] env[62816]: value = "task-1789293" [ 1979.371955] env[62816]: _type = "Task" [ 1979.371955] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.380648] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789293, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.503867] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.511014] env[62816]: DEBUG nova.network.neutron [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [{"id": "7731c29e-449a-4c40-bb70-5a2c88561abe", "address": "fa:16:3e:3e:fe:03", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7731c29e-44", "ovs_interfaceid": "7731c29e-449a-4c40-bb70-5a2c88561abe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1979.884244] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789293, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.005053] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 21%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.013744] env[62816]: DEBUG oslo_concurrency.lockutils [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-341bf195-e528-4e3b-8636-fac7a383d228" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.014735] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae21e8ca-5d11-400a-b1da-1c71c7331523 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.021901] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d3418d-3e71-425d-a83a-8fc5f3314439 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.322240] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1980.322489] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.384085] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789293, 'name': ReconfigVM_Task, 'duration_secs': 0.868414} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.384431] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 35852805-5776-4b65-96aa-4365b32c66d5/35852805-5776-4b65-96aa-4365b32c66d5.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1980.385049] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4131cf59-98e3-4fb8-8b3a-640bb95fa3ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.392157] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1980.392157] env[62816]: value = "task-1789294" [ 1980.392157] env[62816]: _type = "Task" [ 1980.392157] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.401574] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789294, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.504950] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.824845] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1980.904039] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789294, 'name': Rename_Task, 'duration_secs': 0.164981} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.904501] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1980.904814] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b896dd3d-bf98-4c8e-a2f8-58224cc2aefb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.913175] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1980.913175] env[62816]: value = "task-1789296" [ 1980.913175] env[62816]: _type = "Task" [ 1980.913175] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.922182] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.005940] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 26%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.110965] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1981.111382] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-998fbf23-21a2-4c52-ad69-7f49234158d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.121552] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1981.121552] env[62816]: value = "task-1789297" [ 1981.121552] env[62816]: _type = "Task" [ 1981.121552] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.132074] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.349434] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.349732] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.351287] env[62816]: INFO nova.compute.claims [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1981.430061] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789296, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.506456] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.632213] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.924606] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789296, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.007707] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.133530] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789297, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.426856] env[62816]: DEBUG oslo_vmware.api [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789296, 'name': PowerOnVM_Task, 'duration_secs': 1.261426} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.429671] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1982.429899] env[62816]: INFO nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Took 8.96 seconds to spawn the instance on the hypervisor. 
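The records above show the driver's invoke-then-poll pattern: a vCenter operation (Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task) is started, and the returned task is polled until it reaches a terminal state, with progress logged on each pass and a final duration on completion. The following is a minimal, self-contained sketch of that polling loop; FakeTask and wait_for_task here are illustrative stand-ins, not the actual oslo.vmware implementation.

import time


class FakeTask:
    """Stand-in for a vSphere task that finishes after a few polls."""

    def __init__(self, name, steps=(0, 66, 89, 100)):
        self.name = name
        self._steps = list(steps)

    def poll(self):
        # Return (state, progress); report 'success' once progress reaches 100.
        progress = self._steps.pop(0) if self._steps else 100
        state = "success" if progress >= 100 else "running"
        return state, progress


def wait_for_task(task, poll_interval=0.5):
    """Poll a task until it completes, printing progress like _poll_task does."""
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        print(f"Task {task.name} progress is {progress}%.")
        if state == "success":
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully in {duration:.3f}s.")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))
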
[ 1982.430098] env[62816]: DEBUG nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1982.431185] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd3f2f6-bac6-4416-b290-513ead097666 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.455592] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15572e0c-17c7-4781-a36d-8b1c0a4c10b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.463989] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18700a6d-7179-4be6-80fe-83a2d7bef466 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.496335] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d5f31c-bcc4-47c5-9749-8dabd4d9f079 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.512810] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.514112] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54a541a-ee71-4e96-9fc4-53ab1f66b3d8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.529431] env[62816]: DEBUG nova.compute.provider_tree [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1982.634505] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789297, 'name': PowerOffVM_Task, 'duration_secs': 1.215682} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.634885] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1982.635508] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1982.635725] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1982.635882] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1982.636085] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1982.636239] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1982.636388] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1982.636615] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1982.636798] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1982.637011] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1982.637612] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1982.637826] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1982.642797] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e73292c9-ec1e-49cf-8c5b-9ff12c4b1e6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.660120] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1982.660120] env[62816]: value = "task-1789298" [ 1982.660120] env[62816]: _type = "Task" [ 1982.660120] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.670023] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789298, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.953966] env[62816]: INFO nova.compute.manager [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Took 13.71 seconds to build instance. [ 1983.009290] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789290, 'name': CopyVirtualDisk_Task, 'duration_secs': 4.303612} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.009946] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6576f905-3e87-412b-9611-f955fc53c2e8/6576f905-3e87-412b-9611-f955fc53c2e8.vmdk to [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1983.010931] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44fc515-5735-4317-a3d8-3ac2fce17a47 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.033154] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1983.034017] env[62816]: DEBUG nova.scheduler.client.report [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1983.036999] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d7d9341-0844-410b-afbb-e0a9a9618c62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.051368] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.702s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.051824] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1983.060052] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1983.060052] env[62816]: value = "task-1789299" [ 1983.060052] env[62816]: _type = "Task" [ 1983.060052] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.068388] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789299, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.171687] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789298, 'name': ReconfigVM_Task, 'duration_secs': 0.344016} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.172563] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5adcc21-c130-490b-83da-7e6061dc26a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.196320] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1983.196583] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1983.196748] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1983.196933] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1983.197093] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 
tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1983.197245] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1983.197478] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1983.198176] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1983.198398] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1983.198579] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1983.198758] env[62816]: DEBUG nova.virt.hardware [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1983.199575] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba01154e-4867-4622-841d-7e0335c4470e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.205753] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1983.205753] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5233c027-c09e-1ff9-0f9d-ef1f00257a01" [ 1983.205753] env[62816]: _type = "Task" [ 1983.205753] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.214861] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5233c027-c09e-1ff9-0f9d-ef1f00257a01, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.220212] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1983.220799] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34b44eb9-034b-4532-be21-5824b9ed17a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.239959] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1983.239959] env[62816]: value = "task-1789300" [ 1983.239959] env[62816]: _type = "Task" [ 1983.239959] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.249570] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.456282] env[62816]: DEBUG oslo_concurrency.lockutils [None req-211b5181-034b-4c1f-802d-a3ee7216f53b tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.216s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.556354] env[62816]: DEBUG nova.compute.utils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1983.557349] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1983.558027] env[62816]: DEBUG nova.network.neutron [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1983.570979] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789299, 'name': ReconfigVM_Task, 'duration_secs': 0.339961} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.571263] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4/5b87e09d-ae08-4936-8479-c845e25b31b4.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1983.571972] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdfeb520-6d16-4758-aa4f-e2295228e7a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.579478] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1983.579478] env[62816]: value = "task-1789301" [ 1983.579478] env[62816]: _type = "Task" [ 1983.579478] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.587445] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789301, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.605350] env[62816]: DEBUG nova.policy [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f53618eedbd4be28d440e1cbd81a8fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53b24724dc3344f0b4206a015e34f2e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1983.751159] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789300, 'name': ReconfigVM_Task, 'duration_secs': 0.224473} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.751488] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1983.752318] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f864772-ed92-49b1-a306-622ebb41f235 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.777167] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1983.777884] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f783bb06-2ec4-4c2c-a2a1-c38ede21930e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.796790] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1983.796790] env[62816]: value = "task-1789302" [ 1983.796790] env[62816]: _type = "Task" [ 1983.796790] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.805430] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789302, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.857933] env[62816]: DEBUG nova.network.neutron [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Successfully created port: 22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1984.060950] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1984.093825] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789301, 'name': Rename_Task, 'duration_secs': 0.164817} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.094142] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1984.094451] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1461152d-a19a-490f-a1bc-4a2846074d79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.105458] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 1984.105458] env[62816]: value = "task-1789303" [ 1984.105458] env[62816]: _type = "Task" [ 1984.105458] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.115185] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.308050] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789302, 'name': ReconfigVM_Task, 'duration_secs': 0.3416} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.308347] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228/341bf195-e528-4e3b-8636-fac7a383d228.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1984.309231] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac235ab6-a319-490b-aa25-37617b8cde08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.332404] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab08be9-95f3-4978-a41b-c40fb1c2a891 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.353826] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8da240a-521d-424f-ae03-040ce30a9966 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.375307] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673c343c-9fd9-41ac-a6a7-9f80040d7332 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.382786] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1984.383062] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-183f73c9-6313-4b22-bec2-30ed2e58c9f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.389712] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1984.389712] env[62816]: value = "task-1789304" [ 1984.389712] env[62816]: _type = "Task" [ 1984.389712] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.397960] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789304, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.474429] env[62816]: DEBUG oslo_concurrency.lockutils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "35852805-5776-4b65-96aa-4365b32c66d5" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.474682] env[62816]: DEBUG oslo_concurrency.lockutils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.618926] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789303, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.901012] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789304, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.978187] env[62816]: DEBUG nova.compute.utils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1985.071537] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Start spawning the instance on the hypervisor.
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1985.099719] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1985.099982] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1985.100161] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1985.100466] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1985.101031] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1985.101031] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1985.101252] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1985.101455] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1985.101671] 
env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1985.101894] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1985.102136] env[62816]: DEBUG nova.virt.hardware [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1985.103061] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2dcd76b-20f9-4630-aeec-b8175c8c289a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.114815] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b20da25-cbd0-4e75-8d61-3d9a0015d315 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.122214] env[62816]: DEBUG oslo_vmware.api [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789303, 'name': PowerOnVM_Task, 'duration_secs': 0.533046} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.122836] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1985.223237] env[62816]: DEBUG nova.compute.manager [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1985.224160] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbed819c-7712-4e25-89da-66f2afcd87bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.383930] env[62816]: DEBUG nova.compute.manager [req-76e1ac1b-20ef-48ce-9442-dfaf7e56f55d req-47d385a2-e604-4d6f-af40-04923f38fd44 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Received event network-vif-plugged-22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1985.384282] env[62816]: DEBUG oslo_concurrency.lockutils [req-76e1ac1b-20ef-48ce-9442-dfaf7e56f55d req-47d385a2-e604-4d6f-af40-04923f38fd44 service nova] Acquiring lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.384648] env[62816]: DEBUG oslo_concurrency.lockutils [req-76e1ac1b-20ef-48ce-9442-dfaf7e56f55d req-47d385a2-e604-4d6f-af40-04923f38fd44 service nova] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.384907] env[62816]: DEBUG oslo_concurrency.lockutils [req-76e1ac1b-20ef-48ce-9442-dfaf7e56f55d req-47d385a2-e604-4d6f-af40-04923f38fd44 service nova] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.385171] env[62816]: DEBUG nova.compute.manager [req-76e1ac1b-20ef-48ce-9442-dfaf7e56f55d req-47d385a2-e604-4d6f-af40-04923f38fd44 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] No waiting events found dispatching network-vif-plugged-22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1985.385419] env[62816]: WARNING nova.compute.manager [req-76e1ac1b-20ef-48ce-9442-dfaf7e56f55d req-47d385a2-e604-4d6f-af40-04923f38fd44 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Received unexpected event network-vif-plugged-22ed2ee0-3c70-488b-9400-1d013013e5f5 for instance with vm_state building and task_state spawning. 
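The block above records Nova's external-event handling: Neutron reports network-vif-plugged for port 22ed2ee0-3c70-488b-9400-1d013013e5f5, the compute manager takes the per-instance "-events" lock, tries to pop a matching waiter, and, because nothing has registered for that event yet (the instance is still building/spawning), logs it as unexpected. Below is a minimal sketch of that register/pop pattern; the class and function names are illustrative, not Nova's, and the in-process lock stands in for the "-events" lock seen in the log.

import threading
from collections import defaultdict


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()          # stand-in for the per-instance "-events" lock
        self._waiters = defaultdict(dict)      # instance_id -> {event_name: threading.Event}

    def prepare_for_event(self, instance_id, event_name):
        """Register interest in an event before triggering the work that emits it."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_id][event_name] = waiter
        return waiter

    def pop_event(self, instance_id, event_name):
        """Called when the external event arrives; returns the waiter or None."""
        with self._lock:
            return self._waiters[instance_id].pop(event_name, None)


events = InstanceEvents()
instance = "40bddbd1-9fa6-4dfb-9131-6c376f9417de"
event = "network-vif-plugged-22ed2ee0-3c70-488b-9400-1d013013e5f5"

# Event arrives before anyone registered for it -> "unexpected", as in the log.
if events.pop_event(instance, event) is None:
    print(f"Received unexpected event {event}")

# Normal path: register first, then the arriving event releases the waiter.
waiter = events.prepare_for_event(instance, event)
pending = events.pop_event(instance, event)
if pending is not None:
    pending.set()
waiter.wait(timeout=1.0)
print("vif-plugged waiter released:", waiter.is_set())
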
[ 1985.405924] env[62816]: DEBUG oslo_vmware.api [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789304, 'name': PowerOnVM_Task, 'duration_secs': 0.852622} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.406289] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1985.476133] env[62816]: DEBUG nova.network.neutron [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Successfully updated port: 22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1985.480507] env[62816]: DEBUG oslo_concurrency.lockutils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name.<locals>.do_reserve" :: held 1.006s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.741131] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e61f41b-2efb-472b-b9a5-f0c5c8026cfb tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 25.047s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.979045] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-40bddbd1-9fa6-4dfb-9131-6c376f9417de" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.979263] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-40bddbd1-9fa6-4dfb-9131-6c376f9417de" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.979460] env[62816]: DEBUG nova.network.neutron [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1986.444943] env[62816]: INFO nova.compute.manager [None req-48271415-6e2f-49e5-bb7c-24b4938c9f75 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance to original state: 'active' [ 1986.509673]
env[62816]: DEBUG nova.network.neutron [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1986.537909] env[62816]: DEBUG oslo_concurrency.lockutils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "35852805-5776-4b65-96aa-4365b32c66d5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.538201] env[62816]: DEBUG oslo_concurrency.lockutils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.538534] env[62816]: INFO nova.compute.manager [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Attaching volume fd61a4ee-ec53-4ec3-a17f-92f608bcaabb to /dev/sdb [ 1986.569377] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ed7137-59a3-425a-a009-d767cc8ab9bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.577406] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e14b5a-837e-4d7b-b063-3278ef51f100 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.593656] env[62816]: DEBUG nova.virt.block_device [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Updating existing volume attachment record: c7c8d7a6-f3f1-482e-8556-9feb245a0c6a {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1986.648313] env[62816]: DEBUG nova.network.neutron [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Updating instance_info_cache with network_info: [{"id": "22ed2ee0-3c70-488b-9400-1d013013e5f5", "address": "fa:16:3e:d5:ed:77", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22ed2ee0-3c", "ovs_interfaceid": "22ed2ee0-3c70-488b-9400-1d013013e5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1987.155720] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-40bddbd1-9fa6-4dfb-9131-6c376f9417de" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1987.155720] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance network_info: |[{"id": "22ed2ee0-3c70-488b-9400-1d013013e5f5", "address": "fa:16:3e:d5:ed:77", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22ed2ee0-3c", "ovs_interfaceid": "22ed2ee0-3c70-488b-9400-1d013013e5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1987.156051] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:ed:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22ed2ee0-3c70-488b-9400-1d013013e5f5', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1987.163303] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating folder: Project (53b24724dc3344f0b4206a015e34f2e4). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1987.163586] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ccf0d59-3133-43db-afe8-d564c26b8e2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.176608] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created folder: Project (53b24724dc3344f0b4206a015e34f2e4) in parent group-v370905. [ 1987.176910] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating folder: Instances. Parent ref: group-v371207. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1987.177870] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fc79331-3e18-4795-a86f-a823afbab938 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.188048] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created folder: Instances in parent group-v371207. [ 1987.188048] env[62816]: DEBUG oslo.service.loopingcall [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1987.188217] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1987.188437] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89f41449-1718-4372-8a3e-49529812656c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.209128] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1987.209128] env[62816]: value = "task-1789311" [ 1987.209128] env[62816]: _type = "Task" [ 1987.209128] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.217553] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789311, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.463887] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.464543] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.464543] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "341bf195-e528-4e3b-8636-fac7a383d228-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.464680] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.464787] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.466939] env[62816]: INFO nova.compute.manager [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Terminating instance [ 1987.469103] env[62816]: DEBUG nova.compute.manager [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1987.469337] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1987.469605] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00550323-f86e-40ff-b5ef-abe462c9e369 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.478504] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1987.478504] env[62816]: value = "task-1789312" [ 1987.478504] env[62816]: _type = "Task" [ 1987.478504] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.489428] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789312, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.500672] env[62816]: DEBUG nova.compute.manager [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Received event network-changed-22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1987.500672] env[62816]: DEBUG nova.compute.manager [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Refreshing instance network info cache due to event network-changed-22ed2ee0-3c70-488b-9400-1d013013e5f5. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1987.501088] env[62816]: DEBUG oslo_concurrency.lockutils [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] Acquiring lock "refresh_cache-40bddbd1-9fa6-4dfb-9131-6c376f9417de" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1987.501271] env[62816]: DEBUG oslo_concurrency.lockutils [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] Acquired lock "refresh_cache-40bddbd1-9fa6-4dfb-9131-6c376f9417de" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1987.501557] env[62816]: DEBUG nova.network.neutron [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Refreshing network info cache for port 22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1987.722048] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789311, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.988356] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789312, 'name': PowerOffVM_Task, 'duration_secs': 0.371713} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1987.988661] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1987.988863] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Volume detach. Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1987.989068] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371195', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'name': 'volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '341bf195-e528-4e3b-8636-fac7a383d228', 'attached_at': '2024-12-12T03:00:37.000000', 'detached_at': '', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'serial': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1987.989876] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c14c58-51a5-4309-9f9a-7ce692a5d644 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.015030] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de19c3f-826f-4487-8ddc-ac36a78c9796 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.022857] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72df9093-24b6-4818-bc75-8c032cfc3100 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.730371] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcd8d69-9cef-4638-87fd-41c29f1cfcd8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.733010] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.733241] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.740730] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789311, 'name': CreateVM_Task, 'duration_secs': 0.56602} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.751285] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1988.751889] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] The volume has not been displaced from its original location: [datastore1] volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0/volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0.vmdk. No consolidation needed. {{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1988.757117] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1988.758359] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1988.758520] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1988.758843] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1988.759093] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a680ed73-74a7-4bb8-ba99-7761d61faeb2 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.771687] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11421192-5eb7-440a-ae96-b6b79fa2164e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.778748] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1988.778748] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52670d85-e02f-9bb2-e14e-d9e16162c2d3" [ 1988.778748] env[62816]: _type = "Task" [ 1988.778748] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.785041] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1988.785041] env[62816]: value = "task-1789313" [ 1988.785041] env[62816]: _type = "Task" [ 1988.785041] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.791619] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52670d85-e02f-9bb2-e14e-d9e16162c2d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.796839] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789313, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.912775] env[62816]: DEBUG nova.network.neutron [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Updated VIF entry in instance network info cache for port 22ed2ee0-3c70-488b-9400-1d013013e5f5. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1988.913166] env[62816]: DEBUG nova.network.neutron [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Updating instance_info_cache with network_info: [{"id": "22ed2ee0-3c70-488b-9400-1d013013e5f5", "address": "fa:16:3e:d5:ed:77", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22ed2ee0-3c", "ovs_interfaceid": "22ed2ee0-3c70-488b-9400-1d013013e5f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1989.236235] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1989.290302] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52670d85-e02f-9bb2-e14e-d9e16162c2d3, 'name': SearchDatastore_Task, 'duration_secs': 0.029868} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.293421] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.293661] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1989.293890] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1989.294050] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1989.294239] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1989.294551] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cac275ff-c435-4b7b-a2d7-ca69a46479b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.303427] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789313, 'name': ReconfigVM_Task, 'duration_secs': 0.330912} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.303944] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1989.309838] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e70b178-727d-42aa-bbf3-8819ff48965c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.319872] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1989.320071] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1989.321062] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27b91627-265d-4626-bf74-b0884bf75037 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.327380] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1989.327380] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e6bcc2-fab1-f602-5b18-5b6a8eeefe56" [ 1989.327380] env[62816]: _type = "Task" [ 1989.327380] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.331334] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1989.331334] env[62816]: value = "task-1789315" [ 1989.331334] env[62816]: _type = "Task" [ 1989.331334] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.337336] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e6bcc2-fab1-f602-5b18-5b6a8eeefe56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.342013] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789315, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.416477] env[62816]: DEBUG oslo_concurrency.lockutils [req-7e4ea35c-170a-4aeb-81c7-924b5b1699b5 req-4cb5c94b-2449-4ccd-952e-57cbc8ac41f8 service nova] Releasing lock "refresh_cache-40bddbd1-9fa6-4dfb-9131-6c376f9417de" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.759635] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.759894] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.761574] env[62816]: INFO nova.compute.claims [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1989.839435] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e6bcc2-fab1-f602-5b18-5b6a8eeefe56, 'name': SearchDatastore_Task, 'duration_secs': 0.01053} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.840534] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27fbdb7c-9464-454a-9978-a3b4dc659f5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.845355] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789315, 'name': ReconfigVM_Task, 'duration_secs': 0.148503} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.846019] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371195', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'name': 'volume-2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '341bf195-e528-4e3b-8636-fac7a383d228', 'attached_at': '2024-12-12T03:00:37.000000', 'detached_at': '', 'volume_id': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0', 'serial': '2530eec9-9785-4b73-88ea-3c1e49f8f3c0'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1989.846310] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1989.847035] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f8ba7f-af4f-47a7-8408-68b917435ee1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.850419] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1989.850419] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ebec24-bb42-f74f-b1e1-20dc8a4d3f72" [ 1989.850419] env[62816]: _type = "Task" [ 1989.850419] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.855806] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1989.856328] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8e3ad214-a684-44f0-8a58-a9c10744cdae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.861410] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ebec24-bb42-f74f-b1e1-20dc8a4d3f72, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.861643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1989.861894] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1989.862143] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-729dc9ce-27a2-4d58-9071-6c81a9ff7fb3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.869645] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1989.869645] env[62816]: value = "task-1789317" [ 1989.869645] env[62816]: _type = "Task" [ 1989.869645] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.879074] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.984614] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1989.984946] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1989.985250] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleting the datastore file [datastore1] 341bf195-e528-4e3b-8636-fac7a383d228 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1989.985624] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b48ac3d1-dd52-4d35-bdc5-c4f190dc7771 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.993392] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 1989.993392] env[62816]: value = "task-1789318" [ 1989.993392] env[62816]: _type = "Task" [ 1989.993392] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.002959] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789318, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.381302] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789317, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.503657] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789318, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.857472] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b611b158-44dc-40f8-87d4-9b76f041aa69 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.865328] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f931ad9e-420d-4c67-a347-f219638af42a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.898938] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58358ee5-7449-4a8b-bfec-87eec2de621c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.903630] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520325} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.904198] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1990.904420] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1990.904665] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a4a7c62d-cd79-4b87-ae97-d665a0d97808 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.909276] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537dcb35-dbbb-4756-ba54-df23714c2e9b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.913577] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1990.913577] env[62816]: value = "task-1789319" [ 1990.913577] env[62816]: _type = "Task" [ 1990.913577] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.924520] env[62816]: DEBUG nova.compute.provider_tree [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1990.930374] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789319, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.004046] env[62816]: DEBUG oslo_vmware.api [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.572566} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.004319] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1991.004536] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1991.004713] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1991.004902] env[62816]: INFO nova.compute.manager [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Took 3.54 seconds to destroy the instance on the hypervisor. [ 1991.005171] env[62816]: DEBUG oslo.service.loopingcall [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1991.005361] env[62816]: DEBUG nova.compute.manager [-] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1991.005484] env[62816]: DEBUG nova.network.neutron [-] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1991.424368] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789319, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.387076} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.424730] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1991.426618] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9a1667-cbf3-45c5-abec-7323a326c86e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.428892] env[62816]: DEBUG nova.scheduler.client.report [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1991.452791] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1991.455129] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95071d23-015b-42b8-893e-812cf26499ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.471023] env[62816]: DEBUG nova.network.neutron [-] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.472916] env[62816]: DEBUG nova.compute.manager [req-6c581563-2db2-41fe-9a04-fa1f9e06f7f4 req-b394d238-c8dd-4e19-8406-6e07383b573e service nova] 
[instance: 341bf195-e528-4e3b-8636-fac7a383d228] Received event network-vif-deleted-7731c29e-449a-4c40-bb70-5a2c88561abe {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1991.473120] env[62816]: INFO nova.compute.manager [req-6c581563-2db2-41fe-9a04-fa1f9e06f7f4 req-b394d238-c8dd-4e19-8406-6e07383b573e service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Neutron deleted interface 7731c29e-449a-4c40-bb70-5a2c88561abe; detaching it from the instance and deleting it from the info cache [ 1991.473295] env[62816]: DEBUG nova.network.neutron [req-6c581563-2db2-41fe-9a04-fa1f9e06f7f4 req-b394d238-c8dd-4e19-8406-6e07383b573e service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.482539] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1991.482539] env[62816]: value = "task-1789320" [ 1991.482539] env[62816]: _type = "Task" [ 1991.482539] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.494012] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789320, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.643975] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Volume attach. 
Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1991.644257] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371209', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'name': 'volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35852805-5776-4b65-96aa-4365b32c66d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'serial': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1991.645210] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969b89bf-19d1-4716-8c30-c5dd390c3be9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.662588] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5c3ba1-dd04-4e63-a1a1-a25da25fbe91 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.688127] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb/volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1991.688418] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53a734c2-ff11-4b11-a3e3-a09819183e34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.710180] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1991.710180] env[62816]: value = "task-1789321" [ 1991.710180] env[62816]: _type = "Task" [ 1991.710180] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.719006] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789321, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.935087] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.174s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1991.935087] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1991.976329] env[62816]: INFO nova.compute.manager [-] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Took 0.97 seconds to deallocate network for instance. [ 1991.977362] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e9bf361b-c439-46c4-80d8-08e312e453d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.993864] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789320, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.997883] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9daf5ce6-7ba7-440f-a3d0-5bb0629a6689 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.035658] env[62816]: DEBUG nova.compute.manager [req-6c581563-2db2-41fe-9a04-fa1f9e06f7f4 req-b394d238-c8dd-4e19-8406-6e07383b573e service nova] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Detach interface failed, port_id=7731c29e-449a-4c40-bb70-5a2c88561abe, reason: Instance 341bf195-e528-4e3b-8636-fac7a383d228 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1992.223483] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789321, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.444981] env[62816]: DEBUG nova.compute.utils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1992.446455] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1992.446656] env[62816]: DEBUG nova.network.neutron [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1992.493241] env[62816]: DEBUG nova.policy [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0a2129bc83a45d695730796b55f1caf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72d49b085afa4df99700ea4e15e9c87e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1992.497974] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789320, 'name': ReconfigVM_Task, 'duration_secs': 0.531089} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.498258] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1992.499038] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74cb1254-b446-4071-bf48-b679035c089d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.506748] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1992.506748] env[62816]: value = "task-1789322" [ 1992.506748] env[62816]: _type = "Task" [ 1992.506748] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.515347] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789322, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.524196] env[62816]: INFO nova.compute.manager [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Took 0.54 seconds to detach 1 volumes for instance. 
[ 1992.722356] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789321, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.756815] env[62816]: DEBUG nova.network.neutron [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Successfully created port: dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1992.950010] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1993.018485] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789322, 'name': Rename_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.032887] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.033189] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1993.033383] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.058848] env[62816]: INFO nova.scheduler.client.report [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted allocations for instance 341bf195-e528-4e3b-8636-fac7a383d228 [ 1993.222950] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789321, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.455176] env[62816]: INFO nova.virt.block_device [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Booting with volume d04b1984-dcec-45fa-8a8d-eeff8eed3cb3 at /dev/sda [ 1993.485915] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c54177da-7d23-4da8-ab61-ddaef8cb66a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.496189] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920638ac-5b57-42c0-9415-846f94dbcdf4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.516110] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789322, 'name': Rename_Task, 'duration_secs': 0.86326} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.516378] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1993.516606] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b8cfed2-0231-4bb7-9898-2320ea329efc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.524880] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6199ceb4-c0cb-4653-a291-08ef4b0ad0e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.530273] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1993.530273] env[62816]: value = "task-1789323" [ 1993.530273] env[62816]: _type = "Task" [ 1993.530273] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.537069] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e87346-a394-4c9d-bfe6-68281c8ac7ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.555314] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789323, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.570324] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cc82d068-4d9e-423e-9dc9-344b08a65f60 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "341bf195-e528-4e3b-8636-fac7a383d228" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.106s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.572537] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6697bb84-e833-4c77-961b-f2af36a6d68c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.579993] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dec46d85-1c5e-40c9-81a8-72d05bbf4f08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.595031] env[62816]: DEBUG nova.virt.block_device [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating existing volume attachment record: a8dc07fd-719f-4749-b668-178db6d6b7fe {{(pid=62816) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1993.724626] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789321, 'name': ReconfigVM_Task, 'duration_secs': 1.549785} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.724830] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb/volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1993.729505] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8ef2535-0cc6-4b5a-8034-adae54c43058 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.747500] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1993.747500] env[62816]: value = "task-1789324" [ 1993.747500] env[62816]: _type = "Task" [ 1993.747500] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.759071] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789324, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.041890] env[62816]: DEBUG oslo_vmware.api [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789323, 'name': PowerOnVM_Task, 'duration_secs': 0.501133} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.042267] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1994.042579] env[62816]: INFO nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Took 8.97 seconds to spawn the instance on the hypervisor. [ 1994.042822] env[62816]: DEBUG nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1994.043611] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370d000b-435a-4a84-8952-61ca6b21b66c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.233981] env[62816]: DEBUG nova.compute.manager [req-9f52154d-bbc7-4e34-9fe8-86b0df7245bb req-a028c54f-331c-4362-84c4-08e4b9af8250 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Received event network-vif-plugged-dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1994.234269] env[62816]: DEBUG oslo_concurrency.lockutils [req-9f52154d-bbc7-4e34-9fe8-86b0df7245bb req-a028c54f-331c-4362-84c4-08e4b9af8250 service nova] Acquiring lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.234434] env[62816]: DEBUG oslo_concurrency.lockutils [req-9f52154d-bbc7-4e34-9fe8-86b0df7245bb req-a028c54f-331c-4362-84c4-08e4b9af8250 service nova] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.234636] env[62816]: DEBUG oslo_concurrency.lockutils [req-9f52154d-bbc7-4e34-9fe8-86b0df7245bb req-a028c54f-331c-4362-84c4-08e4b9af8250 service nova] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.234822] env[62816]: DEBUG nova.compute.manager [req-9f52154d-bbc7-4e34-9fe8-86b0df7245bb req-a028c54f-331c-4362-84c4-08e4b9af8250 service nova] [instance: 
c48238b9-7a8a-413c-92af-a0fa4b10fe04] No waiting events found dispatching network-vif-plugged-dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1994.235119] env[62816]: WARNING nova.compute.manager [req-9f52154d-bbc7-4e34-9fe8-86b0df7245bb req-a028c54f-331c-4362-84c4-08e4b9af8250 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Received unexpected event network-vif-plugged-dc37e042-ff36-48c9-81a1-a3669e102aae for instance with vm_state building and task_state block_device_mapping. [ 1994.257998] env[62816]: DEBUG oslo_vmware.api [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789324, 'name': ReconfigVM_Task, 'duration_secs': 0.145586} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1994.258316] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371209', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'name': 'volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35852805-5776-4b65-96aa-4365b32c66d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'serial': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1994.321683] env[62816]: DEBUG nova.network.neutron [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Successfully updated port: dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1994.561182] env[62816]: INFO nova.compute.manager [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Took 13.23 seconds to build instance. 
[ 1994.653619] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.653869] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.824711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.824799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.825032] env[62816]: DEBUG nova.network.neutron [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1995.062945] env[62816]: DEBUG oslo_concurrency.lockutils [None req-cbcfa858-44be-4c9d-91a0-cf8858bb2ac3 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.740s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.156732] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1995.292102] env[62816]: DEBUG nova.objects.instance [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'flavor' on Instance uuid 35852805-5776-4b65-96aa-4365b32c66d5 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1995.355221] env[62816]: DEBUG nova.network.neutron [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1995.495886] env[62816]: DEBUG nova.network.neutron [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.675926] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1995.676334] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1995.676546] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1995.676705] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1995.676901] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1995.677110] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1995.677236] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1995.677439] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1995.677596] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1995.677764] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1995.677926] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1995.678112] env[62816]: DEBUG nova.virt.hardware [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1995.679195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.679420] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.681123] env[62816]: INFO nova.compute.claims [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1995.683964] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ffbdb57-cd79-43dd-a39e-6425907dd667 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.692231] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00149418-61a0-4054-ac8c-6d6180a251ea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.797202] env[62816]: DEBUG oslo_concurrency.lockutils [None req-596f9b0c-64e6-418e-b672-67ca2396a92f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.259s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.979600] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "35852805-5776-4b65-96aa-4365b32c66d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.979840] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 
tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.980018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "35852805-5776-4b65-96aa-4365b32c66d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1995.980225] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1995.980396] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.982616] env[62816]: INFO nova.compute.manager [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Terminating instance [ 1995.984480] env[62816]: DEBUG nova.compute.manager [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1995.984748] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1995.984989] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5168fc1-b267-4877-9044-1fca50eae28d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.994521] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1995.994521] env[62816]: value = "task-1789325" [ 1995.994521] env[62816]: _type = "Task" [ 1995.994521] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.998137] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.998418] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Instance network_info: |[{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1995.998832] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:10:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1895250-76cc-41f7-b7f8-2e5679494607', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc37e042-ff36-48c9-81a1-a3669e102aae', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1996.007106] env[62816]: DEBUG oslo.service.loopingcall [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1996.007693] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1996.007917] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02285c9c-0a09-4f5f-aeb6-23c46b485b93 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.026218] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789325, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.032224] env[62816]: INFO nova.compute.manager [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Rebuilding instance [ 1996.034086] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1996.034086] env[62816]: value = "task-1789326" [ 1996.034086] env[62816]: _type = "Task" [ 1996.034086] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.041882] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789326, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.067315] env[62816]: DEBUG nova.compute.manager [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1996.068160] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019e7468-10ed-4c73-991d-5608c04711ba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.263911] env[62816]: DEBUG nova.compute.manager [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Received event network-changed-dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1996.264237] env[62816]: DEBUG nova.compute.manager [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Refreshing instance network info cache due to event network-changed-dc37e042-ff36-48c9-81a1-a3669e102aae. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1996.264549] env[62816]: DEBUG oslo_concurrency.lockutils [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] Acquiring lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1996.264845] env[62816]: DEBUG oslo_concurrency.lockutils [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] Acquired lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1996.265116] env[62816]: DEBUG nova.network.neutron [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Refreshing network info cache for port dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1996.504630] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789325, 'name': PowerOffVM_Task, 'duration_secs': 0.363523} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.504880] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1996.505099] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1996.505297] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371209', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'name': 'volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35852805-5776-4b65-96aa-4365b32c66d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'serial': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1996.506121] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23504986-395e-419f-affe-97871639c072 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.526525] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a96b805-25bb-48df-a241-d113717dc661 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.533692] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a9f934-1932-4de4-a9d2-63f376a1dee3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.543549] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789326, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.559019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4004418-1d33-45be-8b04-998edac73985 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.581661] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1996.581977] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] The volume has not been displaced from its original location: [datastore1] volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb/volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb.vmdk. No consolidation needed. 
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1996.587215] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1996.587496] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0062157f-9b13-4f2f-85bd-926485a0cf18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.589050] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a9043ece-91cd-42f5-8bb5-a295eb7cfd4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.608477] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1996.608477] env[62816]: value = "task-1789328" [ 1996.608477] env[62816]: _type = "Task" [ 1996.608477] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.609823] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1996.609823] env[62816]: value = "task-1789327" [ 1996.609823] env[62816]: _type = "Task" [ 1996.609823] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.623030] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789328, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.625715] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789327, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.795039] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa88e2d-0cf4-441b-9189-b8ac44408153 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.806221] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-764f0927-cff3-4321-896f-6c7b9f67ff43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.839138] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f0f2e9-ad54-46e3-8b57-ce748c4f2e8c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.854013] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5afcb872-598c-4228-ab89-e34526c5e6dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.868305] env[62816]: DEBUG nova.compute.provider_tree [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.981056] env[62816]: DEBUG nova.network.neutron [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updated VIF entry in instance network info cache for port dc37e042-ff36-48c9-81a1-a3669e102aae. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1996.981442] env[62816]: DEBUG nova.network.neutron [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1997.047391] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789326, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.122781] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789327, 'name': PowerOffVM_Task, 'duration_secs': 0.219937} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.125720] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1997.125969] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1997.126263] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789328, 'name': ReconfigVM_Task, 'duration_secs': 0.262871} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.126969] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa98b02-10a6-4306-af6a-1bae8fc34209 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.129428] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1997.133877] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d796564e-271e-4631-8dfb-8ce0af740e6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.149168] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1997.150319] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43b6177f-3280-4acc-996e-5cd0a68ae14c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.151763] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1997.151763] env[62816]: value = "task-1789329" [ 1997.151763] env[62816]: _type = "Task" [ 1997.151763] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.159597] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789329, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.236425] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1997.236776] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1997.237181] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1997.237579] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8b1af19e-f861-4053-8916-859327aefb85 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.246305] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1997.246305] env[62816]: value = "task-1789331" [ 1997.246305] env[62816]: _type = "Task" [ 1997.246305] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.254255] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789331, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.371244] env[62816]: DEBUG nova.scheduler.client.report [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1997.483984] env[62816]: DEBUG oslo_concurrency.lockutils [req-99a9e79a-2b29-4a77-ab79-13877b24a343 req-167654b5-3ddc-4346-8e41-b7e69c4f3f79 service nova] Releasing lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1997.546889] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789326, 'name': CreateVM_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.661988] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789329, 'name': ReconfigVM_Task, 'duration_secs': 0.234624} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1997.662309] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371209', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'name': 'volume-fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '35852805-5776-4b65-96aa-4365b32c66d5', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb', 'serial': 'fd61a4ee-ec53-4ec3-a17f-92f608bcaabb'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1997.662605] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1997.663353] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc1fcec-4eac-47e0-b45b-d732f8aeb4f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.670022] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Unregistering the VM 
{{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1997.670242] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-accb04ca-eddf-4949-9622-84163476c7a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.744595] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1997.744910] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1997.745104] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleting the datastore file [datastore1] 35852805-5776-4b65-96aa-4365b32c66d5 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1997.745370] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b3dd9f0-179c-42c9-b4ea-927f9426552a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.755479] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789331, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.758208] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 1997.758208] env[62816]: value = "task-1789333" [ 1997.758208] env[62816]: _type = "Task" [ 1997.758208] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.766488] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789333, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.878117] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.198s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.878117] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1998.047138] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789326, 'name': CreateVM_Task, 'duration_secs': 1.93109} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.047325] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1998.048066] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'device_type': None, 'boot_index': 0, 'guest_format': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371204', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'name': 'volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c48238b9-7a8a-413c-92af-a0fa4b10fe04', 'attached_at': '', 'detached_at': '', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'serial': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3'}, 'disk_bus': None, 'delete_on_termination': True, 'attachment_id': 'a8dc07fd-719f-4749-b668-178db6d6b7fe', 'volume_type': None}], 'swap': None} {{(pid=62816) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1998.048310] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Root volume attach. 
Driver type: vmdk {{(pid=62816) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1998.049403] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0315462e-fa0f-497d-9666-c12b6fe89ae7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.057459] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061b104e-d2fb-4686-a092-53c9c75e95f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.063201] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb89952-6208-400f-8d23-f3e5d74ddd62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.068733] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-fac5ce16-2081-424a-85e9-65f317985d03 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.074980] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1998.074980] env[62816]: value = "task-1789334" [ 1998.074980] env[62816]: _type = "Task" [ 1998.074980] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.081767] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789334, 'name': RelocateVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.258521] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789331, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.720473} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.258905] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.259074] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.259255] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.273594] env[62816]: DEBUG oslo_vmware.api [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789333, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.205041} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.273828] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.274020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.274205] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.274378] env[62816]: INFO nova.compute.manager [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Took 2.29 seconds to destroy the instance on the hypervisor. [ 1998.274661] env[62816]: DEBUG oslo.service.loopingcall [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.274840] env[62816]: DEBUG nova.compute.manager [-] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1998.274931] env[62816]: DEBUG nova.network.neutron [-] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1998.384132] env[62816]: DEBUG nova.compute.utils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1998.385691] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1998.385890] env[62816]: DEBUG nova.network.neutron [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1998.424827] env[62816]: DEBUG nova.policy [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a59c608ab954a3ba9cd61a84f30b89f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8c54ea5a5abf4f0298b76f6081de8e60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 1998.541391] env[62816]: DEBUG nova.compute.manager [req-114cf208-633e-4751-8cb6-23d75da095f2 req-146930a8-351f-47c2-a6b2-5ea4bb2c4d88 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Received event network-vif-deleted-942da178-a0ce-4757-9a96-359dd73d7aff {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1998.541604] env[62816]: INFO nova.compute.manager [req-114cf208-633e-4751-8cb6-23d75da095f2 req-146930a8-351f-47c2-a6b2-5ea4bb2c4d88 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Neutron deleted interface 942da178-a0ce-4757-9a96-359dd73d7aff; detaching it from the instance and deleting it from the info cache [ 1998.541777] env[62816]: DEBUG nova.network.neutron [req-114cf208-633e-4751-8cb6-23d75da095f2 req-146930a8-351f-47c2-a6b2-5ea4bb2c4d88 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1998.586183] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 
tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789334, 'name': RelocateVM_Task, 'duration_secs': 0.027714} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.586568] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Volume attach. Driver type: vmdk {{(pid=62816) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1998.586727] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371204', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'name': 'volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c48238b9-7a8a-413c-92af-a0fa4b10fe04', 'attached_at': '', 'detached_at': '', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'serial': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1998.587487] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af56dc8b-115e-43d8-96a2-05104afe5903 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.603103] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e38c7f2e-dbd0-433c-9fc9-289f35d95bf1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.626396] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3/volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1998.626969] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-362fb7b9-4688-4950-a89d-a1edb408ac7e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.647325] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1998.647325] env[62816]: value = "task-1789335" [ 1998.647325] env[62816]: _type = "Task" [ 1998.647325] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.655035] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789335, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.724680] env[62816]: DEBUG nova.network.neutron [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Successfully created port: eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1998.888720] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1999.017530] env[62816]: DEBUG nova.network.neutron [-] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.044855] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bcf6d5d-43ff-4890-be2b-a3211427ed02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.066020] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ebcf37-cf51-4c62-8fa5-82fed7e3b22a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.098534] env[62816]: DEBUG nova.compute.manager [req-114cf208-633e-4751-8cb6-23d75da095f2 req-146930a8-351f-47c2-a6b2-5ea4bb2c4d88 service nova] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Detach interface failed, port_id=942da178-a0ce-4757-9a96-359dd73d7aff, reason: Instance 35852805-5776-4b65-96aa-4365b32c66d5 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1999.157425] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789335, 'name': ReconfigVM_Task, 'duration_secs': 0.279753} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.157736] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3/volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1999.162383] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9cb015d-2638-4a33-a283-4e6672f03cf6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.179567] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1999.179567] env[62816]: value = "task-1789336" [ 1999.179567] env[62816]: _type = "Task" [ 1999.179567] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.190438] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789336, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.298916] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1999.299267] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1999.299382] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1999.299541] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 
tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1999.299691] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1999.299846] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1999.300055] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1999.300220] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1999.300392] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1999.300555] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1999.300729] env[62816]: DEBUG nova.virt.hardware [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1999.301664] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92a0e15-cde8-4575-8ece-5ba8489557d4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.311090] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce24355b-5a41-4ddd-8a49-b2a794eba0d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.325781] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:d5:ed:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22ed2ee0-3c70-488b-9400-1d013013e5f5', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1999.333023] env[62816]: DEBUG oslo.service.loopingcall [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1999.333285] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1999.333499] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65ae5fda-c00a-45b1-bba0-44a1094c697f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.352209] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1999.352209] env[62816]: value = "task-1789337" [ 1999.352209] env[62816]: _type = "Task" [ 1999.352209] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.361305] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789337, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.520206] env[62816]: INFO nova.compute.manager [-] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Took 1.25 seconds to deallocate network for instance. [ 1999.689707] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789336, 'name': ReconfigVM_Task, 'duration_secs': 0.159634} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.689991] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371204', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'name': 'volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c48238b9-7a8a-413c-92af-a0fa4b10fe04', 'attached_at': '', 'detached_at': '', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'serial': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3'} {{(pid=62816) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1999.690535] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a5a17b8-d741-4a8a-86f1-f0f110e5a075 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.697114] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 1999.697114] env[62816]: value = "task-1789338" [ 1999.697114] env[62816]: _type = "Task" [ 1999.697114] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.711482] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789338, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.862138] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789337, 'name': CreateVM_Task, 'duration_secs': 0.32305} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.862373] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1999.863178] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1999.863372] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1999.863705] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1999.863956] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c26613a5-6864-45e7-9db8-a61664e8847a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.868649] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 1999.868649] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d38f6a-0cc3-606c-4aac-de38684f41da" [ 1999.868649] env[62816]: _type = "Task" [ 1999.868649] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.876418] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d38f6a-0cc3-606c-4aac-de38684f41da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.898789] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1999.918603] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1999.918836] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1999.918996] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1999.919230] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1999.919381] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1999.919533] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1999.919742] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1999.919909] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1999.920089] env[62816]: DEBUG 
nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1999.920259] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1999.920433] env[62816]: DEBUG nova.virt.hardware [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1999.921253] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b590fe0-1252-45af-8366-91a188e5c410 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.929017] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c9acb15-bfdf-49af-8baf-cd4ecf42777c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.069215] env[62816]: INFO nova.compute.manager [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Took 0.55 seconds to detach 1 volumes for instance. 
[ 2000.159739] env[62816]: DEBUG nova.compute.manager [req-36290775-ad19-4dfa-a548-593398075ceb req-03414208-2987-49b5-89c2-e0c2dfe0f030 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-vif-plugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2000.159994] env[62816]: DEBUG oslo_concurrency.lockutils [req-36290775-ad19-4dfa-a548-593398075ceb req-03414208-2987-49b5-89c2-e0c2dfe0f030 service nova] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.160210] env[62816]: DEBUG oslo_concurrency.lockutils [req-36290775-ad19-4dfa-a548-593398075ceb req-03414208-2987-49b5-89c2-e0c2dfe0f030 service nova] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.160401] env[62816]: DEBUG oslo_concurrency.lockutils [req-36290775-ad19-4dfa-a548-593398075ceb req-03414208-2987-49b5-89c2-e0c2dfe0f030 service nova] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.160582] env[62816]: DEBUG nova.compute.manager [req-36290775-ad19-4dfa-a548-593398075ceb req-03414208-2987-49b5-89c2-e0c2dfe0f030 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] No waiting events found dispatching network-vif-plugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2000.160704] env[62816]: WARNING nova.compute.manager [req-36290775-ad19-4dfa-a548-593398075ceb req-03414208-2987-49b5-89c2-e0c2dfe0f030 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received unexpected event network-vif-plugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 for instance with vm_state building and task_state spawning. [ 2000.208192] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789338, 'name': Rename_Task, 'duration_secs': 0.134898} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.208581] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2000.208581] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bed644a1-72a3-44ba-a59e-503aa4d2de44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.213790] env[62816]: DEBUG nova.network.neutron [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Successfully updated port: eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2000.217036] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2000.217036] env[62816]: value = "task-1789339" [ 2000.217036] env[62816]: _type = "Task" [ 2000.217036] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.225922] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789339, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.380359] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d38f6a-0cc3-606c-4aac-de38684f41da, 'name': SearchDatastore_Task, 'duration_secs': 0.010361} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.380805] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2000.381146] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2000.381507] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.381743] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.382014] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2000.382479] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-430ef20b-6975-4e60-8a49-5b4961789af3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.390878] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2000.391144] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2000.391968] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8722b2e7-bbef-4f42-a364-485a5a58fac5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.397288] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2000.397288] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5268f4bd-7434-2d88-cbaa-755025573504" [ 2000.397288] env[62816]: _type = "Task" [ 2000.397288] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.406242] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5268f4bd-7434-2d88-cbaa-755025573504, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.576426] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.576871] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.577294] env[62816]: DEBUG nova.objects.instance [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'resources' on Instance uuid 35852805-5776-4b65-96aa-4365b32c66d5 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2000.716450] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.716617] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.716813] env[62816]: DEBUG nova.network.neutron [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 
5e6be756-2dba-4977-aad2-61c5e97dc761] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2000.728106] env[62816]: DEBUG oslo_vmware.api [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789339, 'name': PowerOnVM_Task, 'duration_secs': 0.471131} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.728378] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2000.728581] env[62816]: INFO nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Took 5.05 seconds to spawn the instance on the hypervisor. [ 2000.728761] env[62816]: DEBUG nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2000.729552] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027611f0-bc36-474a-8daf-d12a917d6ec7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.907993] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5268f4bd-7434-2d88-cbaa-755025573504, 'name': SearchDatastore_Task, 'duration_secs': 0.011902} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.908791] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9989aa08-265a-424a-b04c-91a8cd42ebdc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.913725] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2000.913725] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f82ad5-4a12-f258-7877-aa2cab7da610" [ 2000.913725] env[62816]: _type = "Task" [ 2000.913725] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.921118] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f82ad5-4a12-f258-7877-aa2cab7da610, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.168992] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4ea19c-03dc-4a40-995b-ebd2316714c5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.178454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e606bd7-df0c-4ee2-a913-77a96c5b6f55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.208533] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283ca7f3-fb08-4363-8efa-1f41180df120 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.215545] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae14012d-4dab-475f-af04-1f24e6d7915a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.229899] env[62816]: DEBUG nova.compute.provider_tree [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.247041] env[62816]: INFO nova.compute.manager [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Took 11.51 seconds to build instance. [ 2001.260468] env[62816]: DEBUG nova.network.neutron [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2001.387620] env[62816]: DEBUG nova.network.neutron [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.424302] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f82ad5-4a12-f258-7877-aa2cab7da610, 'name': SearchDatastore_Task, 'duration_secs': 0.018572} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.424552] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.424826] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2001.425124] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9d1a360-b39e-4f2a-a3c6-4bb62de626e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.431586] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2001.431586] env[62816]: value = "task-1789340" [ 2001.431586] env[62816]: _type = "Task" [ 2001.431586] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.439328] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789340, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.733433] env[62816]: DEBUG nova.scheduler.client.report [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2001.749041] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58ab3639-fab8-4d3b-8c20-1b87bb66e772 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.016s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.890746] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.890886] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Instance network_info: |[{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2001.891284] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:e2:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eadfcc8c-606b-4352-8ce4-4ad681cc07c6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2001.900799] env[62816]: DEBUG oslo.service.loopingcall [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2001.900799] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2001.900799] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b57e2b5-36ec-4fe0-b045-1082b36130ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.923397] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2001.923397] env[62816]: value = "task-1789341" [ 2001.923397] env[62816]: _type = "Task" [ 2001.923397] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.931752] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789341, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.940069] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789340, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503884} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.940425] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2001.940676] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2001.940930] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-370fb59b-9a98-4c8d-b3c2-0bc908012519 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.949980] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2001.949980] env[62816]: value = "task-1789342" [ 2001.949980] env[62816]: _type = "Task" [ 2001.949980] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.962096] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789342, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.238398] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.246207] env[62816]: DEBUG nova.compute.manager [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2002.246421] env[62816]: DEBUG nova.compute.manager [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing instance network info cache due to event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2002.246621] env[62816]: DEBUG oslo_concurrency.lockutils [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.246804] env[62816]: DEBUG oslo_concurrency.lockutils [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.246974] env[62816]: DEBUG nova.network.neutron [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2002.264503] env[62816]: INFO nova.scheduler.client.report [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocations for instance 35852805-5776-4b65-96aa-4365b32c66d5 [ 2002.437084] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789341, 'name': CreateVM_Task, 'duration_secs': 0.413317} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.437084] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2002.437084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.437084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.437084] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2002.437084] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-727f5263-929c-4bca-8737-66e2c09d8348 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.442314] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2002.442314] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521e0b69-1a1d-7442-2798-35608f33d7fd" [ 2002.442314] env[62816]: _type = "Task" [ 2002.442314] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.450460] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521e0b69-1a1d-7442-2798-35608f33d7fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.458061] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.458313] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789342, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074158} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.458763] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2002.459590] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60eb9924-6f48-4155-a895-2f5b33c5e19b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.482333] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2002.482609] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a325745-5ebb-4c1b-bfa6-6c3d9ca5564b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.502655] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2002.502655] env[62816]: value = "task-1789343" [ 2002.502655] env[62816]: _type = "Task" [ 2002.502655] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.511207] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789343, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.771935] env[62816]: DEBUG oslo_concurrency.lockutils [None req-79bb7f75-72ae-4dda-84af-1a68c5a7409e tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "35852805-5776-4b65-96aa-4365b32c66d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.792s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.956104] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521e0b69-1a1d-7442-2798-35608f33d7fd, 'name': SearchDatastore_Task, 'duration_secs': 0.036995} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.956434] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.956674] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2002.960095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.960296] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.960495] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2002.962999] env[62816]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-12c65cf3-1cb7-4dec-b4ee-88955c7c9b23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.989017] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2002.989017] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2002.989017] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c4c0f2d-704d-48b4-b548-578de0ce4c3e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.993529] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2002.993529] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d029a0-4664-61b9-3019-2d7c60e16375" [ 2002.993529] env[62816]: _type = "Task" [ 2002.993529] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.004585] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d029a0-4664-61b9-3019-2d7c60e16375, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.014863] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789343, 'name': ReconfigVM_Task, 'duration_secs': 0.315364} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.015588] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de/40bddbd1-9fa6-4dfb-9131-6c376f9417de.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2003.016425] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c411607f-dfbc-4986-a5d2-e861a431b2a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.025958] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2003.025958] env[62816]: value = "task-1789344" [ 2003.025958] env[62816]: _type = "Task" [ 2003.025958] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.038143] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789344, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.102279] env[62816]: DEBUG nova.network.neutron [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updated VIF entry in instance network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2003.102713] env[62816]: DEBUG nova.network.neutron [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.506150] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d029a0-4664-61b9-3019-2d7c60e16375, 'name': SearchDatastore_Task, 'duration_secs': 0.022905} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.507148] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-856f45e2-089d-44ed-9718-429726f2ee2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.513882] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2003.513882] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521fd200-55d6-6e10-25b8-0f0af840fd78" [ 2003.513882] env[62816]: _type = "Task" [ 2003.513882] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.523461] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521fd200-55d6-6e10-25b8-0f0af840fd78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.535059] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789344, 'name': Rename_Task, 'duration_secs': 0.153227} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.535336] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2003.535590] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98d9009a-0e6c-4293-a0e9-094662310442 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.542539] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2003.542539] env[62816]: value = "task-1789345" [ 2003.542539] env[62816]: _type = "Task" [ 2003.542539] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.551655] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789345, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.605520] env[62816]: DEBUG oslo_concurrency.lockutils [req-bce6e3b1-0aa7-4536-af2e-f6db2fd910dd req-4173d644-2ab4-4376-ae5a-91214ca4cb2d service nova] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.026028] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521fd200-55d6-6e10-25b8-0f0af840fd78, 'name': SearchDatastore_Task, 'duration_secs': 0.012257} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.026397] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2004.026783] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2004.027127] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43731919-195a-4572-bd7e-866c11537153 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.034496] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2004.034496] env[62816]: value = "task-1789346" [ 2004.034496] env[62816]: _type = "Task" [ 2004.034496] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.044586] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789346, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.053682] env[62816]: DEBUG oslo_vmware.api [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789345, 'name': PowerOnVM_Task, 'duration_secs': 0.488482} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.053910] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2004.054256] env[62816]: DEBUG nova.compute.manager [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2004.055094] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd20a9d-77cc-4412-add6-657a63f61c70 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.094429] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.094724] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.276303] env[62816]: DEBUG nova.compute.manager [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Received event network-changed-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2004.276528] env[62816]: DEBUG nova.compute.manager [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Refreshing instance network info cache due to event network-changed-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2004.276755] env[62816]: DEBUG oslo_concurrency.lockutils [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] Acquiring lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.276929] env[62816]: DEBUG oslo_concurrency.lockutils [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] Acquired lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.277137] env[62816]: DEBUG nova.network.neutron [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Refreshing network info cache for port c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2004.456651] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2004.456827] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2004.456955] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2004.490662] env[62816]: DEBUG nova.compute.manager [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2004.548222] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789346, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.573289] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.573576] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.573848] env[62816]: DEBUG nova.objects.instance [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2004.597629] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2004.960971] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Skipping network cache update for instance because it is Building. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2005.004125] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.012793] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.020302] env[62816]: DEBUG nova.network.neutron [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updated VIF entry in instance network info cache for port c1c7e341-ffdc-440b-8b2a-6dff7559b1bd. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2005.020676] env[62816]: DEBUG nova.network.neutron [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [{"id": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "address": "fa:16:3e:de:ed:af", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c7e341-ff", "ovs_interfaceid": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.046945] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789346, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.116509] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.302893] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.303154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.303420] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.303518] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.303795] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.306455] env[62816]: INFO nova.compute.manager [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Terminating instance [ 2005.308503] env[62816]: DEBUG nova.compute.manager [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2005.308722] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2005.309664] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2563d7d4-42b8-470a-8d02-41df84d83028 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.318639] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2005.318871] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a2ad53d-86b0-425a-81ed-c2117cd450f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.326336] env[62816]: DEBUG oslo_vmware.api [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2005.326336] env[62816]: value = "task-1789347" [ 2005.326336] env[62816]: _type = "Task" [ 2005.326336] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.336476] env[62816]: DEBUG oslo_vmware.api [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789347, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.523919] env[62816]: DEBUG oslo_concurrency.lockutils [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] Releasing lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.524304] env[62816]: DEBUG nova.compute.manager [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Received event network-changed-dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2005.524690] env[62816]: DEBUG nova.compute.manager [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Refreshing instance network info cache due to event network-changed-dc37e042-ff36-48c9-81a1-a3669e102aae. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2005.524775] env[62816]: DEBUG oslo_concurrency.lockutils [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] Acquiring lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.524939] env[62816]: DEBUG oslo_concurrency.lockutils [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] Acquired lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.525141] env[62816]: DEBUG nova.network.neutron [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Refreshing network info cache for port dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2005.526643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.526643] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2005.526740] env[62816]: DEBUG nova.objects.instance [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lazy-loading 'info_cache' on Instance uuid 4ab07a21-2685-42bc-af13-b95473993d6f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2005.547071] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789346, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.049423} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.547341] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2005.547557] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2005.547809] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9c09e6e2-e7f1-4bb1-9968-0281b6e4059b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.555430] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2005.555430] env[62816]: value = "task-1789348" [ 2005.555430] env[62816]: _type = "Task" [ 2005.555430] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.563708] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789348, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.582719] env[62816]: DEBUG oslo_concurrency.lockutils [None req-bd9ec433-175f-4cf4-95d6-24dab2881774 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.583914] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.571s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.744488] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.744723] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.835562] env[62816]: DEBUG oslo_vmware.api [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789347, 'name': PowerOffVM_Task, 'duration_secs': 0.364784} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.835839] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2005.836011] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2005.836263] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06239676-21f0-474c-b613-c38556f6965c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.937528] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2005.937935] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2005.938259] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] 40bddbd1-9fa6-4dfb-9131-6c376f9417de {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2005.938663] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-735764f0-4dee-4a67-b7ae-7222b210dffb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.945779] env[62816]: DEBUG oslo_vmware.api [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2005.945779] env[62816]: value = "task-1789350" [ 2005.945779] env[62816]: _type = "Task" [ 2005.945779] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.954350] env[62816]: DEBUG oslo_vmware.api [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789350, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.069266] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789348, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07366} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.069608] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2006.070552] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-372b2db3-1feb-400e-b20b-d87ea188ef00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.088213] env[62816]: INFO nova.compute.claims [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2006.100505] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2006.100966] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fbcb6663-3338-4cdf-81a7-c5ff1a10794d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.125679] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2006.125679] env[62816]: value = "task-1789351" [ 2006.125679] env[62816]: _type = "Task" [ 2006.125679] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.136478] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789351, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.247487] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2006.286828] env[62816]: DEBUG nova.network.neutron [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updated VIF entry in instance network info cache for port dc37e042-ff36-48c9-81a1-a3669e102aae. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2006.287585] env[62816]: DEBUG nova.network.neutron [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.455504] env[62816]: DEBUG oslo_vmware.api [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789350, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.258117} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.455692] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2006.455906] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2006.456121] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2006.456303] env[62816]: INFO nova.compute.manager [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Took 1.15 seconds to destroy the instance on the hypervisor. [ 2006.456537] env[62816]: DEBUG oslo.service.loopingcall [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2006.456725] env[62816]: DEBUG nova.compute.manager [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2006.456819] env[62816]: DEBUG nova.network.neutron [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2006.603245] env[62816]: INFO nova.compute.resource_tracker [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating resource usage from migration ce4b779a-2a8b-4ea5-8857-e07b4249145a [ 2006.637171] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789351, 'name': ReconfigVM_Task, 'duration_secs': 0.316679} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.637484] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2006.638119] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16189f23-22a0-4f7f-add4-96605598c8bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.649110] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2006.649110] env[62816]: value = "task-1789352" [ 2006.649110] env[62816]: _type = "Task" [ 2006.649110] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.661833] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789352, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.742873] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0711e244-c168-42d1-ac46-49d5a482110b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.748386] env[62816]: DEBUG nova.compute.manager [req-55c2c9ac-f0ba-4a41-b00c-4bda44fa7c21 req-9f6329e6-8c2c-4be3-a998-72f20480e04e service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Received event network-vif-deleted-22ed2ee0-3c70-488b-9400-1d013013e5f5 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2006.748616] env[62816]: INFO nova.compute.manager [req-55c2c9ac-f0ba-4a41-b00c-4bda44fa7c21 req-9f6329e6-8c2c-4be3-a998-72f20480e04e service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Neutron deleted interface 22ed2ee0-3c70-488b-9400-1d013013e5f5; detaching it from the instance and deleting it from the info cache [ 2006.748824] env[62816]: DEBUG nova.network.neutron [req-55c2c9ac-f0ba-4a41-b00c-4bda44fa7c21 req-9f6329e6-8c2c-4be3-a998-72f20480e04e service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2006.757631] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63aeee3-28f3-4df0-88e4-4879ecaefb58 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.794057] env[62816]: DEBUG oslo_concurrency.lockutils [req-7c24d6b5-7de0-41ec-994d-b1fc9f911ebd req-f8debd0f-90f6-401d-8ffc-a359b3ba965d service nova] Releasing lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.795436] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.796226] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95ec13a-a9dd-4616-890d-3f20ffce7000 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.804165] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2f5cc4-3686-4ff7-8b1d-7f6846f79b9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.817940] env[62816]: DEBUG nova.compute.provider_tree [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2007.158376] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789352, 'name': Rename_Task, 'duration_secs': 0.155311} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.158664] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2007.158959] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5406f88-9351-41ea-a160-01a16edddc43 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.168571] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2007.168571] env[62816]: value = "task-1789353" [ 2007.168571] env[62816]: _type = "Task" [ 2007.168571] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.176661] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789353, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.226555] env[62816]: DEBUG nova.network.neutron [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.244037] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [{"id": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "address": "fa:16:3e:de:ed:af", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1c7e341-ff", "ovs_interfaceid": "c1c7e341-ffdc-440b-8b2a-6dff7559b1bd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.254033] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee537213-025b-4de6-a19c-7162b210988b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.264677] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18175d05-a134-4807-81c2-cc233db53e2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.297105] env[62816]: DEBUG nova.compute.manager [req-55c2c9ac-f0ba-4a41-b00c-4bda44fa7c21 req-9f6329e6-8c2c-4be3-a998-72f20480e04e service nova] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Detach interface failed, port_id=22ed2ee0-3c70-488b-9400-1d013013e5f5, reason: Instance 40bddbd1-9fa6-4dfb-9131-6c376f9417de could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2007.321288] env[62816]: DEBUG nova.scheduler.client.report [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2007.680223] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789353, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.729940] env[62816]: INFO nova.compute.manager [-] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Took 1.27 seconds to deallocate network for instance. [ 2007.746601] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-4ab07a21-2685-42bc-af13-b95473993d6f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.746601] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2007.746853] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.747123] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.747437] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.747663] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2007.747931] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2007.828745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.245s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.828994] env[62816]: INFO nova.compute.manager [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Migrating [ 2007.835767] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.719s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.837253] env[62816]: INFO nova.compute.claims [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2008.180200] env[62816]: DEBUG oslo_vmware.api [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789353, 'name': PowerOnVM_Task, 'duration_secs': 0.795083} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.180515] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2008.180774] env[62816]: INFO nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Took 8.28 seconds to spawn the instance on the hypervisor. 
[ 2008.180988] env[62816]: DEBUG nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2008.181803] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1cd807-4a57-4eee-a059-8191fba44c20 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.236932] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.251303] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.347215] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.347450] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.347592] env[62816]: DEBUG nova.network.neutron [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2008.705020] env[62816]: INFO nova.compute.manager [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Took 13.04 seconds to build instance. [ 2008.956373] env[62816]: DEBUG nova.compute.manager [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2008.956373] env[62816]: DEBUG nova.compute.manager [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing instance network info cache due to event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2008.956373] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.956373] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.956373] env[62816]: DEBUG nova.network.neutron [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2008.984648] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c22d201-7308-4252-9112-dba48b211f51 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.992994] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9332d50-b1ae-459a-93ea-efd3634dd377 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.031942] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1124c6c-8135-424b-af4f-9373a3e29842 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.040881] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a996dd1-71eb-4895-9edf-b90deb86f9f4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.054839] env[62816]: DEBUG nova.compute.provider_tree [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2009.147033] env[62816]: DEBUG nova.network.neutron [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.207068] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a83a740c-60e9-40a2-ae38-35e5f8f7d13b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.553s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.559843] env[62816]: DEBUG nova.scheduler.client.report [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2009.649309] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.688179] env[62816]: DEBUG nova.network.neutron [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updated VIF entry in instance network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2009.688556] env[62816]: DEBUG nova.network.neutron [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2010.065053] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.065371] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Start building networks asynchronously for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2010.069082] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.273s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.069987] env[62816]: INFO nova.compute.claims [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2010.190639] env[62816]: DEBUG oslo_concurrency.lockutils [req-2e592d07-b706-4429-bf8f-4acb9f336462 req-3a49f924-1d4f-4802-9a30-26da58c9157b service nova] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2010.574295] env[62816]: DEBUG nova.compute.utils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2010.577796] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2010.577989] env[62816]: DEBUG nova.network.neutron [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2010.619288] env[62816]: DEBUG nova.policy [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c5023a18d243b7aafb6d6181f931d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e070d1729247ff83b4ff6997b45385', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2010.877447] env[62816]: DEBUG nova.network.neutron [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Successfully created port: a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2011.079478] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 
b139cd0e-b827-4521-b8e1-8fe5303ed596] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2011.167107] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104ba6cf-bf92-4584-aaeb-5164e7bd1078 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.188853] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2011.231467] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cd6326-2bc0-42de-8ff3-425094aa308d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.240883] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b9e92c-f2e1-4273-b308-229af15cbbc2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.271491] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9bb2f9-1bec-4fc0-b99c-f662c76499e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.279354] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5056b2-2b45-492a-8593-d756a8f8f628 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.293559] env[62816]: DEBUG nova.compute.provider_tree [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2011.697736] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2011.697864] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-746a8dd3-e5a8-4492-a499-4300f59bf03d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.706944] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2011.706944] env[62816]: value = "task-1789354" [ 2011.706944] env[62816]: _type = "Task" [ 2011.706944] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.715278] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789354, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.796336] env[62816]: DEBUG nova.scheduler.client.report [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2012.093440] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2012.119862] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2012.120127] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2012.120290] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2012.120470] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 2012.120615] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2012.120759] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2012.120964] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2012.121139] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2012.121308] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2012.121471] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2012.121649] env[62816]: DEBUG nova.virt.hardware [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2012.122524] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9711ed5b-f207-4ab8-ad5f-dca2c5155881 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.130651] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10e90fc-343c-4746-ba01-57820017afcc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.216935] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789354, 'name': PowerOffVM_Task, 'duration_secs': 0.189499} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.217245] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2012.217438] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2012.291813] env[62816]: DEBUG nova.compute.manager [req-15f3b275-09d2-4e57-919c-066eaa5273ef req-4455ee3a-ff66-469d-aa3d-449f32861367 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Received event network-vif-plugged-a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2012.291813] env[62816]: DEBUG oslo_concurrency.lockutils [req-15f3b275-09d2-4e57-919c-066eaa5273ef req-4455ee3a-ff66-469d-aa3d-449f32861367 service nova] Acquiring lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.292142] env[62816]: DEBUG oslo_concurrency.lockutils [req-15f3b275-09d2-4e57-919c-066eaa5273ef req-4455ee3a-ff66-469d-aa3d-449f32861367 service nova] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.292207] env[62816]: DEBUG oslo_concurrency.lockutils [req-15f3b275-09d2-4e57-919c-066eaa5273ef req-4455ee3a-ff66-469d-aa3d-449f32861367 service nova] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.292333] env[62816]: DEBUG nova.compute.manager [req-15f3b275-09d2-4e57-919c-066eaa5273ef req-4455ee3a-ff66-469d-aa3d-449f32861367 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] No waiting events found dispatching network-vif-plugged-a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2012.292503] env[62816]: WARNING nova.compute.manager [req-15f3b275-09d2-4e57-919c-066eaa5273ef req-4455ee3a-ff66-469d-aa3d-449f32861367 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Received unexpected event network-vif-plugged-a93b8888-3507-409d-afe6-ddc568a2c790 for instance with vm_state building and task_state spawning. 
[ 2012.301207] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.233s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.301682] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2012.304148] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.067s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.304377] env[62816]: DEBUG nova.objects.instance [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'resources' on Instance uuid 40bddbd1-9fa6-4dfb-9131-6c376f9417de {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2012.383760] env[62816]: DEBUG nova.network.neutron [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Successfully updated port: a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2012.723443] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2012.723684] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2012.723843] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2012.724038] env[62816]: DEBUG nova.virt.hardware [None 
req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2012.724199] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2012.724364] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2012.724634] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2012.724804] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2012.724973] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2012.725186] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2012.725374] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2012.730780] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b136d4eb-1150-49e1-98c5-0c92576d59e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.747562] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2012.747562] env[62816]: value = "task-1789355" [ 2012.747562] env[62816]: _type = "Task" [ 2012.747562] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.755857] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789355, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.808835] env[62816]: DEBUG nova.compute.utils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2012.813642] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2012.814012] env[62816]: DEBUG nova.network.neutron [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2012.862232] env[62816]: DEBUG nova.policy [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f53618eedbd4be28d440e1cbd81a8fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53b24724dc3344f0b4206a015e34f2e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2012.886845] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2012.886845] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2012.886972] env[62816]: DEBUG nova.network.neutron [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2012.938849] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ff97e072-b819-432c-8209-9d084a607f1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.949308] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3043a716-79bc-430a-83dc-ac7eb1fd0da4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.981191] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90129d20-0b80-4fb2-b69f-de3092ce33b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.989224] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe727d2b-7224-4084-898f-ff0bbf4216c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.002587] env[62816]: DEBUG nova.compute.provider_tree [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2013.112585] env[62816]: DEBUG nova.network.neutron [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Successfully created port: dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2013.258137] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789355, 'name': ReconfigVM_Task, 'duration_secs': 0.202692} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.258495] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2013.317992] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2013.424347] env[62816]: DEBUG nova.network.neutron [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2013.505675] env[62816]: DEBUG nova.scheduler.client.report [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2013.549755] env[62816]: DEBUG nova.network.neutron [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updating instance_info_cache with network_info: [{"id": "a93b8888-3507-409d-afe6-ddc568a2c790", "address": "fa:16:3e:bc:b1:9a", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93b8888-35", "ovs_interfaceid": "a93b8888-3507-409d-afe6-ddc568a2c790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.764714] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2013.764962] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 2013.765189] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2013.765390] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2013.765541] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2013.765696] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2013.765900] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2013.766113] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2013.766306] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2013.766477] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2013.766654] env[62816]: DEBUG nova.virt.hardware [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2013.771906] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2013.772209] env[62816]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-024ee516-1c61-4e84-9f6c-f7c7cb05382d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.791268] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2013.791268] env[62816]: value = "task-1789356" [ 2013.791268] env[62816]: _type = "Task" [ 2013.791268] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.799713] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789356, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.011027] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.707s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.013176] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.762s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.013392] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.013491] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2014.014345] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa65f53-c4b6-449d-a53f-cfecd71b21de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.023582] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21f0655-a72d-442f-9317-2d7f52be862b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.038039] env[62816]: INFO nova.scheduler.client.report [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocations for instance 40bddbd1-9fa6-4dfb-9131-6c376f9417de [ 2014.039497] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd69e06-cb95-45c3-8395-ca02b9742628 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.048769] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c6cbe8-335d-4723-ab70-556ed5044657 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.052112] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.052396] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Instance network_info: |[{"id": "a93b8888-3507-409d-afe6-ddc568a2c790", "address": "fa:16:3e:bc:b1:9a", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93b8888-35", "ovs_interfaceid": "a93b8888-3507-409d-afe6-ddc568a2c790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2014.052764] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:b1:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a93b8888-3507-409d-afe6-ddc568a2c790', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2014.060336] env[62816]: DEBUG oslo.service.loopingcall [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2014.060927] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2014.061179] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-485bafcd-063f-4e05-927f-5811a04f8b04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.098753] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180878MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2014.098902] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.099117] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2014.108177] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2014.108177] env[62816]: value = "task-1789357" [ 2014.108177] env[62816]: _type = "Task" [ 2014.108177] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.116438] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789357, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.302156] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789356, 'name': ReconfigVM_Task, 'duration_secs': 0.172026} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.302457] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2014.303254] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c53f7a-0267-413b-b8ed-ec762a179bd5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.327223] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3/volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2014.328478] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-94ed33fd-f89e-4c1e-ba6b-f00a664d8755 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.343971] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2014.347055] env[62816]: DEBUG nova.compute.manager [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Received event network-changed-a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2014.347055] env[62816]: DEBUG nova.compute.manager [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Refreshing instance network info cache due to event network-changed-a93b8888-3507-409d-afe6-ddc568a2c790. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2014.347055] env[62816]: DEBUG oslo_concurrency.lockutils [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] Acquiring lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.347270] env[62816]: DEBUG oslo_concurrency.lockutils [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] Acquired lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.347433] env[62816]: DEBUG nova.network.neutron [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Refreshing network info cache for port a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2014.357057] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2014.357057] env[62816]: value = "task-1789358" [ 2014.357057] env[62816]: _type = "Task" [ 2014.357057] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.366679] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789358, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.375202] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2014.375470] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2014.375631] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2014.375816] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2014.375965] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2014.376133] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2014.376363] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2014.376523] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
2014.376692] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2014.376857] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2014.377049] env[62816]: DEBUG nova.virt.hardware [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2014.377919] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad0e315-d611-458c-b90a-00494c2a68d9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.386224] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7757ce8b-af57-4fb0-ae8d-6feae94fd40f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.548255] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c744d1a5-650a-43c5-ba27-7097eba2a5f2 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "40bddbd1-9fa6-4dfb-9131-6c376f9417de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.245s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.619629] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789357, 'name': CreateVM_Task, 'duration_secs': 0.352517} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.619629] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2014.619927] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.620101] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.620439] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2014.621290] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc62f9ed-2490-4178-89e0-f6ac2695f981 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.627997] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2014.627997] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522fff43-1861-a5af-baa8-5147287b925f" [ 2014.627997] env[62816]: _type = "Task" [ 2014.627997] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.638985] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522fff43-1861-a5af-baa8-5147287b925f, 'name': SearchDatastore_Task, 'duration_secs': 0.009132} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.639271] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2014.639356] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2014.639508] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2014.639657] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2014.639840] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2014.640421] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9e9e0ce-0dd1-4797-8f96-d08016f42a0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.653261] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2014.653450] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2014.654197] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f17f1bd-7372-4eff-ae36-b2b769d5e482 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.660196] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2014.660196] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5292bc6b-030c-68d8-f2ec-124f3329c6dc" [ 2014.660196] env[62816]: _type = "Task" [ 2014.660196] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.667621] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5292bc6b-030c-68d8-f2ec-124f3329c6dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.675171] env[62816]: DEBUG nova.network.neutron [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Successfully updated port: dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2014.867395] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789358, 'name': ReconfigVM_Task, 'duration_secs': 0.297696} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.867652] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3/volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2014.867909] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2015.042605] env[62816]: DEBUG nova.network.neutron [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updated VIF entry in instance network info cache for port a93b8888-3507-409d-afe6-ddc568a2c790. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2015.043008] env[62816]: DEBUG nova.network.neutron [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updating instance_info_cache with network_info: [{"id": "a93b8888-3507-409d-afe6-ddc568a2c790", "address": "fa:16:3e:bc:b1:9a", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93b8888-35", "ovs_interfaceid": "a93b8888-3507-409d-afe6-ddc568a2c790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.109448] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Applying migration context for instance c48238b9-7a8a-413c-92af-a0fa4b10fe04 as it has an incoming, in-progress migration ce4b779a-2a8b-4ea5-8857-e07b4249145a. Migration status is migrating {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2015.110540] env[62816]: INFO nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating resource usage from migration ce4b779a-2a8b-4ea5-8857-e07b4249145a [ 2015.126067] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4ab07a21-2685-42bc-af13-b95473993d6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.126333] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5e76d63c-b05c-4e8b-8b90-6110bd7d654c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.126333] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5b87e09d-ae08-4936-8479-c845e25b31b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.126447] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5e6be756-2dba-4977-aad2-61c5e97dc761 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.126568] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Migration ce4b779a-2a8b-4ea5-8857-e07b4249145a is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2015.126682] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b139cd0e-b827-4521-b8e1-8fe5303ed596 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.126797] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance c48238b9-7a8a-413c-92af-a0fa4b10fe04 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.126910] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.127107] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2015.127247] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2112MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2015.172523] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5292bc6b-030c-68d8-f2ec-124f3329c6dc, 'name': SearchDatastore_Task, 'duration_secs': 0.008102} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.173338] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47c01cdd-568e-4bdb-9e71-8563148e1d74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.177693] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.177825] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.177970] env[62816]: DEBUG nova.network.neutron [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2015.183082] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2015.183082] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]526b6073-45f8-f6f5-abf9-3b42fdfe642a" [ 2015.183082] env[62816]: _type = "Task" [ 2015.183082] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.194952] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526b6073-45f8-f6f5-abf9-3b42fdfe642a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.235385] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11838099-9712-4972-adbf-5c4787126fd9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.243314] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d50bd3-93c5-4aa0-be54-09ac53a6bf14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.272789] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b683e612-f3bc-4928-87ab-1639e6ae7988 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.280574] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9556c094-9c80-4105-bea9-d1805a9c2f8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.294093] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2015.374254] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c9bfa8-f68a-4b4d-bbec-e0bc8d7fd637 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.395163] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae65ea0-cb2f-4c44-89b8-be4aca773408 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.413110] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2015.546171] env[62816]: DEBUG oslo_concurrency.lockutils [req-ef44b881-483d-4143-ac82-276260388620 req-104c54a6-4f97-4622-80bd-b9dbe25204ef service nova] Releasing lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.696257] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]526b6073-45f8-f6f5-abf9-3b42fdfe642a, 'name': SearchDatastore_Task, 'duration_secs': 0.011525} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.696607] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2015.696804] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b139cd0e-b827-4521-b8e1-8fe5303ed596/b139cd0e-b827-4521-b8e1-8fe5303ed596.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2015.697150] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f6678ac-8a87-4c35-bb75-290ba2d4988a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.706097] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2015.706097] env[62816]: value = "task-1789359" [ 2015.706097] env[62816]: _type = "Task" [ 2015.706097] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.715259] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789359, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.717220] env[62816]: DEBUG nova.network.neutron [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2015.797040] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2015.857120] env[62816]: DEBUG nova.network.neutron [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Updating instance_info_cache with network_info: [{"id": "dc91c14a-1b97-4e8b-a1e2-742e0a39b089", "address": "fa:16:3e:a9:58:f5", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc91c14a-1b", "ovs_interfaceid": "dc91c14a-1b97-4e8b-a1e2-742e0a39b089", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.217908] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789359, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.303044] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2016.303345] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.204s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.342575] env[62816]: DEBUG nova.compute.manager [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Received event network-vif-plugged-dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2016.342795] env[62816]: DEBUG oslo_concurrency.lockutils [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] Acquiring lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.343016] env[62816]: DEBUG oslo_concurrency.lockutils [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.343211] env[62816]: DEBUG oslo_concurrency.lockutils [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.343382] env[62816]: DEBUG nova.compute.manager [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] No waiting events found dispatching network-vif-plugged-dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2016.343551] env[62816]: WARNING nova.compute.manager [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Received unexpected event network-vif-plugged-dc91c14a-1b97-4e8b-a1e2-742e0a39b089 for instance with vm_state building and task_state spawning. 
[ 2016.343712] env[62816]: DEBUG nova.compute.manager [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Received event network-changed-dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2016.343867] env[62816]: DEBUG nova.compute.manager [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Refreshing instance network info cache due to event network-changed-dc91c14a-1b97-4e8b-a1e2-742e0a39b089. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2016.344044] env[62816]: DEBUG oslo_concurrency.lockutils [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] Acquiring lock "refresh_cache-152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.359388] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.359667] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance network_info: |[{"id": "dc91c14a-1b97-4e8b-a1e2-742e0a39b089", "address": "fa:16:3e:a9:58:f5", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc91c14a-1b", "ovs_interfaceid": "dc91c14a-1b97-4e8b-a1e2-742e0a39b089", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2016.360197] env[62816]: DEBUG oslo_concurrency.lockutils [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] Acquired lock "refresh_cache-152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.360351] env[62816]: DEBUG nova.network.neutron [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Refreshing 
network info cache for port dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2016.361497] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:58:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc91c14a-1b97-4e8b-a1e2-742e0a39b089', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2016.368922] env[62816]: DEBUG oslo.service.loopingcall [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2016.369737] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2016.369955] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b0c0973-d745-42d9-a1c4-254376a643ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.390394] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2016.390394] env[62816]: value = "task-1789360" [ 2016.390394] env[62816]: _type = "Task" [ 2016.390394] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.398310] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789360, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.719035] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789359, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539129} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.719035] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b139cd0e-b827-4521-b8e1-8fe5303ed596/b139cd0e-b827-4521-b8e1-8fe5303ed596.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2016.719035] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2016.719353] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-071fab8c-2ea0-4835-a76c-c32cd96f6fab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.726235] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2016.726235] env[62816]: value = "task-1789361" [ 2016.726235] env[62816]: _type = "Task" [ 2016.726235] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.736564] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789361, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.906444] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789360, 'name': CreateVM_Task, 'duration_secs': 0.411281} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.906623] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2016.907286] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.907472] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.908140] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2016.908140] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90bb717b-5906-4185-9901-91fd257d2a13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.912749] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2016.912749] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527b107c-fcc0-eb71-8858-1192a5c36419" [ 2016.912749] env[62816]: _type = "Task" [ 2016.912749] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.920553] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527b107c-fcc0-eb71-8858-1192a5c36419, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.012434] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.012654] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.056366] env[62816]: DEBUG nova.network.neutron [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Updated VIF entry in instance network info cache for port dc91c14a-1b97-4e8b-a1e2-742e0a39b089. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2017.056735] env[62816]: DEBUG nova.network.neutron [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Updating instance_info_cache with network_info: [{"id": "dc91c14a-1b97-4e8b-a1e2-742e0a39b089", "address": "fa:16:3e:a9:58:f5", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc91c14a-1b", "ovs_interfaceid": "dc91c14a-1b97-4e8b-a1e2-742e0a39b089", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.077212] env[62816]: DEBUG nova.network.neutron [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Port dc37e042-ff36-48c9-81a1-a3669e102aae binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2017.237067] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789361, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06832} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.237533] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2017.238155] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052c20ea-91ea-4a17-9977-b660d395095d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.260426] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] b139cd0e-b827-4521-b8e1-8fe5303ed596/b139cd0e-b827-4521-b8e1-8fe5303ed596.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2017.260695] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54721f7c-e711-448d-b2cf-4936247ca2b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.279966] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2017.279966] env[62816]: value = "task-1789362" [ 2017.279966] env[62816]: _type = "Task" [ 2017.279966] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.287996] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789362, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.423333] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527b107c-fcc0-eb71-8858-1192a5c36419, 'name': SearchDatastore_Task, 'duration_secs': 0.010408} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.423610] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.423867] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2017.424128] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.424298] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.424487] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.424748] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e57c15a-f601-49a0-bab6-844382b72bc5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.432596] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.432766] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2017.433475] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6b54b1f-f3e8-4a53-9fd5-c730c95c78b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.438327] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2017.438327] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b2a868-f484-d3b5-57b8-110737a404ac" [ 2017.438327] env[62816]: _type = "Task" [ 2017.438327] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.445757] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b2a868-f484-d3b5-57b8-110737a404ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.518489] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.518686] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2017.559634] env[62816]: DEBUG oslo_concurrency.lockutils [req-4939af46-e2ee-42e9-a521-22b944bad9df req-09f77507-63f0-4127-b3e0-9677916c2460 service nova] Releasing lock "refresh_cache-152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.790259] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789362, 'name': ReconfigVM_Task, 'duration_secs': 0.325247} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.790553] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Reconfigured VM instance instance-0000006b to attach disk [datastore1] b139cd0e-b827-4521-b8e1-8fe5303ed596/b139cd0e-b827-4521-b8e1-8fe5303ed596.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2017.791214] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc55aa9b-b7aa-4f7e-b116-01c27439054b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.798975] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2017.798975] env[62816]: value = "task-1789363" [ 2017.798975] env[62816]: _type = "Task" [ 2017.798975] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.807773] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789363, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.949766] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b2a868-f484-d3b5-57b8-110737a404ac, 'name': SearchDatastore_Task, 'duration_secs': 0.008999} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2017.950702] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9f8c32d-f838-4d20-a896-58564e962895 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.962038] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2017.962038] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]527ab875-9622-1225-fd14-f91fe8f384d1" [ 2017.962038] env[62816]: _type = "Task" [ 2017.962038] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.972235] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527ab875-9622-1225-fd14-f91fe8f384d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.094797] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.095039] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.095251] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.309889] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789363, 'name': Rename_Task, 'duration_secs': 0.174571} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.310272] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2018.310484] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9853cb3f-534a-448b-bcf2-1333ba676767 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.318245] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2018.318245] env[62816]: value = "task-1789364" [ 2018.318245] env[62816]: _type = "Task" [ 2018.318245] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.327810] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789364, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.473402] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]527ab875-9622-1225-fd14-f91fe8f384d1, 'name': SearchDatastore_Task, 'duration_secs': 0.026972} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.473770] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.474058] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2018.474329] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2056c295-ea62-47bc-a7d4-86afa1e33548 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.481719] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2018.481719] env[62816]: value = "task-1789365" [ 2018.481719] env[62816]: _type = "Task" [ 2018.481719] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.489870] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789365, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.830940] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789364, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.992036] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789365, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.485164} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.992274] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2018.992489] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2018.992745] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-029ec615-4d3a-4954-8c7b-d948e0a0a528 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.999619] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2018.999619] env[62816]: value = "task-1789366" [ 2018.999619] env[62816]: _type = "Task" [ 2018.999619] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.008182] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789366, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.135818] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.136043] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.136239] env[62816]: DEBUG nova.network.neutron [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2019.328696] env[62816]: DEBUG oslo_vmware.api [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789364, 'name': PowerOnVM_Task, 'duration_secs': 0.750728} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.329071] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2019.329191] env[62816]: INFO nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Took 7.24 seconds to spawn the instance on the hypervisor. [ 2019.329377] env[62816]: DEBUG nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2019.330141] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c756887-7148-4fbc-b8f3-4e9a89523824 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.511392] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789366, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071182} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.511665] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2019.512430] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20af2184-ccdc-4628-932f-4234912c00a4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.533970] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2019.534224] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66438ef5-9fdd-4312-9772-27150bd571d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.555900] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2019.555900] env[62816]: value = "task-1789367" [ 2019.555900] env[62816]: _type = "Task" [ 2019.555900] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.563874] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789367, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.844862] env[62816]: DEBUG nova.network.neutron [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.848547] env[62816]: INFO nova.compute.manager [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Took 14.75 seconds to build instance. [ 2020.066567] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789367, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.350403] env[62816]: DEBUG oslo_concurrency.lockutils [None req-70470380-2f88-4f94-834a-7bb04585d296 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.256s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.350865] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.567574] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789367, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.836825] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.836825] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.836825] env[62816]: INFO nova.compute.manager [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Shelving [ 2020.859400] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a9de6b-7890-4c9c-809b-6f1652ce234d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.866798] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a6b9e0-6154-4d86-a9f5-91944a7eb31c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.070220] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789367, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.343845] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.344122] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3584c247-0a3e-46d9-a32d-e9621e66dd4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.352072] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2021.352072] env[62816]: value = "task-1789368" [ 2021.352072] env[62816]: _type = "Task" [ 2021.352072] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.360437] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789368, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.537584] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.537875] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.538113] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.538369] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.538540] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.540660] env[62816]: INFO nova.compute.manager [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Terminating instance [ 2021.542441] env[62816]: DEBUG nova.compute.manager [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2021.542645] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2021.543523] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3cfa32-9582-4b73-915d-5dfb345c6beb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.551249] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2021.551490] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e81ac9d6-8ff9-49ae-86ee-16974535e49c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.558282] env[62816]: DEBUG oslo_vmware.api [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 2021.558282] env[62816]: value = "task-1789369" [ 2021.558282] env[62816]: _type = "Task" [ 2021.558282] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.569880] env[62816]: DEBUG oslo_vmware.api [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789369, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.573240] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789367, 'name': ReconfigVM_Task, 'duration_secs': 1.547089} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.573548] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2021.574325] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3edf303-e767-4107-a109-852d580d17b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.582623] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2021.582623] env[62816]: value = "task-1789370" [ 2021.582623] env[62816]: _type = "Task" [ 2021.582623] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.591719] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789370, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.862652] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789368, 'name': PowerOffVM_Task, 'duration_secs': 0.162118} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.862872] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2021.863650] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f25f0b-b3c9-419a-bd2e-846470e3b39e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.881441] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d851d306-95b7-4785-952e-434a440e93fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.970562] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eeb7352-0b5c-4ec8-a261-fbeefceb59fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.990361] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb09d54-7fd2-4ce4-b371-e51607e789a4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.997874] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2022.070514] env[62816]: DEBUG oslo_vmware.api [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789369, 'name': PowerOffVM_Task, 'duration_secs': 0.157441} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.070780] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2022.070959] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2022.071226] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e68d482f-8309-48e1-9995-83fa6e40d7d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.093168] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789370, 'name': Rename_Task, 'duration_secs': 0.161676} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.093460] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2022.093716] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-37067ed1-0d5d-4d52-bb03-a3abcb7fd0d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.099750] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2022.099750] env[62816]: value = "task-1789372" [ 2022.099750] env[62816]: _type = "Task" [ 2022.099750] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.108010] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.391921] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2022.392293] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-c40bccb5-ba8e-4327-a096-baf39e67d5c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.402322] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2022.402322] env[62816]: value = "task-1789373" [ 2022.402322] env[62816]: _type = "Task" [ 2022.402322] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.411834] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789373, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.504220] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2022.504550] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c56d5b6c-5217-4bac-8b23-f0eb25dbf560 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.513373] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2022.513373] env[62816]: value = "task-1789374" [ 2022.513373] env[62816]: _type = "Task" [ 2022.513373] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.522215] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789374, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.610652] env[62816]: DEBUG oslo_vmware.api [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789372, 'name': PowerOnVM_Task, 'duration_secs': 0.463195} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.610652] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2022.610826] env[62816]: INFO nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Took 8.27 seconds to spawn the instance on the hypervisor. [ 2022.610898] env[62816]: DEBUG nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2022.611785] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dead46f-294e-4bb1-ac78-7ba2d523c0dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.641626] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2022.641705] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2022.641908] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleting the datastore file [datastore1] 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2022.642231] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-257cfc55-7514-452c-b2b6-05443fc364a4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.650557] env[62816]: DEBUG oslo_vmware.api [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for the task: (returnval){ [ 2022.650557] env[62816]: value = "task-1789375" [ 2022.650557] env[62816]: _type = "Task" [ 2022.650557] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.660493] env[62816]: DEBUG oslo_vmware.api [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.913572] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789373, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.024691] env[62816]: DEBUG oslo_vmware.api [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789374, 'name': PowerOnVM_Task, 'duration_secs': 0.481032} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.024967] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2023.025168] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0ba3fd1-9bbf-4d2b-9a22-488c28fd5230 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance 'c48238b9-7a8a-413c-92af-a0fa4b10fe04' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2023.132958] env[62816]: INFO nova.compute.manager [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Took 16.38 seconds to build instance. [ 2023.160932] env[62816]: DEBUG oslo_vmware.api [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Task: {'id': task-1789375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158123} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.161341] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2023.161628] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2023.161898] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2023.162201] env[62816]: INFO nova.compute.manager [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Took 1.62 seconds to destroy the instance on the hypervisor. [ 2023.162597] env[62816]: DEBUG oslo.service.loopingcall [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2023.162888] env[62816]: DEBUG nova.compute.manager [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2023.163056] env[62816]: DEBUG nova.network.neutron [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2023.415425] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789373, 'name': CreateSnapshot_Task, 'duration_secs': 0.553031} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.415728] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2023.416544] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa81afc8-716f-453e-9805-8ba78aa94993 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.635435] env[62816]: DEBUG oslo_concurrency.lockutils [None req-141a7d7a-4fcc-4a0f-8447-28e005ec2fe9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.890s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.644645] env[62816]: DEBUG nova.compute.manager [req-09d2aa6d-d43e-40d1-a40c-31b1a53d0039 req-e5d8698e-6d56-4d6e-abd4-2b756f04efaf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Received event network-vif-deleted-850c89e0-1047-4847-b1c9-d9fd0435045e {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2023.644778] env[62816]: INFO nova.compute.manager [req-09d2aa6d-d43e-40d1-a40c-31b1a53d0039 req-e5d8698e-6d56-4d6e-abd4-2b756f04efaf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Neutron deleted interface 850c89e0-1047-4847-b1c9-d9fd0435045e; detaching it from the instance and deleting it from the info cache [ 2023.644941] env[62816]: DEBUG nova.network.neutron [req-09d2aa6d-d43e-40d1-a40c-31b1a53d0039 req-e5d8698e-6d56-4d6e-abd4-2b756f04efaf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.878178] env[62816]: INFO nova.compute.manager [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Rebuilding instance [ 2023.925227] env[62816]: DEBUG nova.compute.manager [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2023.926183] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de2bf2d-7d3b-43c3-ac98-6a5c83cdf001 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.935776] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 
2023.936031] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c4e95ec9-8e9c-4b21-9449-127041a367bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.947186] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2023.947186] env[62816]: value = "task-1789376" [ 2023.947186] env[62816]: _type = "Task" [ 2023.947186] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.957691] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789376, 'name': CloneVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.124687] env[62816]: DEBUG nova.network.neutron [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2024.147016] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7bd2a2d7-663b-40dd-9371-401d41c61d9d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.158435] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db41d949-a46c-4ad8-a5e8-abb3e26f117a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.190453] env[62816]: DEBUG nova.compute.manager [req-09d2aa6d-d43e-40d1-a40c-31b1a53d0039 req-e5d8698e-6d56-4d6e-abd4-2b756f04efaf service nova] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Detach interface failed, port_id=850c89e0-1047-4847-b1c9-d9fd0435045e, reason: Instance 5b87e09d-ae08-4936-8479-c845e25b31b4 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2024.449208] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2024.449581] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62d0bbaa-ca47-4a13-b7be-c769b49c0f7c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.461906] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789376, 'name': CloneVM_Task} progress is 12%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.463227] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2024.463227] env[62816]: value = "task-1789377" [ 2024.463227] env[62816]: _type = "Task" [ 2024.463227] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.471116] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789377, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.628615] env[62816]: INFO nova.compute.manager [-] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Took 1.47 seconds to deallocate network for instance. [ 2024.960783] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789376, 'name': CloneVM_Task} progress is 12%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.971664] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789377, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.136056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.136056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.136056] env[62816]: DEBUG nova.objects.instance [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lazy-loading 'resources' on Instance uuid 5b87e09d-ae08-4936-8479-c845e25b31b4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2025.179526] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.179777] env[62816]: DEBUG oslo_concurrency.lockutils 
[None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.179982] env[62816]: DEBUG nova.compute.manager [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Going to confirm migration 5 {{(pid=62816) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2025.462042] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789376, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.472830] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789377, 'name': PowerOffVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.724750] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2025.724980] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquired lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2025.725193] env[62816]: DEBUG nova.network.neutron [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2025.725441] env[62816]: DEBUG nova.objects.instance [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'info_cache' on Instance uuid c48238b9-7a8a-413c-92af-a0fa4b10fe04 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2025.759020] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac93ca6-50a6-4cd0-945c-6e9aaace99b5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.768973] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4c2d55-5ad6-43ad-8994-b576ea721b23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.802726] env[62816]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c57d471-60b6-4c87-b421-c5009c30acce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.811812] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec601fb-5003-406f-b405-d591954a5fbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.826744] env[62816]: DEBUG nova.compute.provider_tree [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2025.962554] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789376, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.974383] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789377, 'name': PowerOffVM_Task, 'duration_secs': 1.199165} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.974596] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2025.974830] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.975618] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4110bb9-5fd8-4d55-991e-f9375b88088b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.983237] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2025.983490] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-11992109-adec-4ecd-84aa-1b8ce5d1e0a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.058292] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] 
Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2026.058522] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2026.058706] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2026.058973] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68ba368b-c247-4942-8190-12cce57d65fd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.067058] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2026.067058] env[62816]: value = "task-1789379" [ 2026.067058] env[62816]: _type = "Task" [ 2026.067058] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.074699] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.330413] env[62816]: DEBUG nova.scheduler.client.report [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2026.461870] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789376, 'name': CloneVM_Task, 'duration_secs': 2.371261} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.462142] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Created linked-clone VM from snapshot [ 2026.462866] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7277e363-df0d-4bcc-832a-5ec42ce2b4e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.469839] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Uploading image ac302d8c-1510-4114-9e3c-88835ec9c8e9 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2026.494926] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2026.494926] env[62816]: value = "vm-371217" [ 2026.494926] env[62816]: _type = "VirtualMachine" [ 2026.494926] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2026.495236] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a8713a18-7667-4af0-b2c7-750fc1fcb6d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.501967] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lease: (returnval){ [ 2026.501967] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5204361d-ae76-8a03-70b8-52ac86f3c761" [ 2026.501967] env[62816]: _type = "HttpNfcLease" [ 2026.501967] env[62816]: } obtained for exporting VM: (result){ [ 2026.501967] env[62816]: value = "vm-371217" [ 2026.501967] env[62816]: _type = "VirtualMachine" [ 2026.501967] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2026.502215] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the lease: (returnval){ [ 2026.502215] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5204361d-ae76-8a03-70b8-52ac86f3c761" [ 2026.502215] env[62816]: _type = "HttpNfcLease" [ 2026.502215] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2026.508228] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2026.508228] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5204361d-ae76-8a03-70b8-52ac86f3c761" [ 2026.508228] env[62816]: _type = "HttpNfcLease" [ 2026.508228] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2026.576204] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789379, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.835073] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.852140] env[62816]: INFO nova.scheduler.client.report [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Deleted allocations for instance 5b87e09d-ae08-4936-8479-c845e25b31b4 [ 2026.978692] env[62816]: DEBUG nova.network.neutron [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [{"id": "dc37e042-ff36-48c9-81a1-a3669e102aae", "address": "fa:16:3e:37:10:62", "network": {"id": "d81469b1-2a76-49e4-afa0-904f3bfb8ee6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1550674074-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72d49b085afa4df99700ea4e15e9c87e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1895250-76cc-41f7-b7f8-2e5679494607", "external-id": "nsx-vlan-transportzone-785", "segmentation_id": 785, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc37e042-ff", "ovs_interfaceid": "dc37e042-ff36-48c9-81a1-a3669e102aae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.010570] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2027.010570] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5204361d-ae76-8a03-70b8-52ac86f3c761" [ 2027.010570] env[62816]: _type = "HttpNfcLease" [ 2027.010570] env[62816]: } is ready. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2027.010879] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2027.010879] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5204361d-ae76-8a03-70b8-52ac86f3c761" [ 2027.010879] env[62816]: _type = "HttpNfcLease" [ 2027.010879] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2027.011599] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ee6e53-6ab2-4233-8319-2bc3923038f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.018645] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5208c97c-5997-af43-4666-2be24da91637/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2027.018814] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5208c97c-5997-af43-4666-2be24da91637/disk-0.vmdk for reading. {{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2027.084599] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789379, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.836161} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.084898] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2027.085180] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2027.085410] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2027.104255] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c41fffb5-944a-441d-bf09-26c7c77dc58a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.360222] env[62816]: DEBUG oslo_concurrency.lockutils [None req-77ca9021-634d-4e01-acfd-a0dc49a978b1 tempest-AttachVolumeShelveTestJSON-1779994752 tempest-AttachVolumeShelveTestJSON-1779994752-project-member] Lock "5b87e09d-ae08-4936-8479-c845e25b31b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.822s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.481593] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Releasing lock "refresh_cache-c48238b9-7a8a-413c-92af-a0fa4b10fe04" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2027.483030] env[62816]: DEBUG nova.objects.instance [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'migration_context' on Instance uuid c48238b9-7a8a-413c-92af-a0fa4b10fe04 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2027.985835] env[62816]: DEBUG nova.objects.base [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2027.988588] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94824df7-83f6-4276-ad12-f0b511ebbf04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.009994] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91a33c80-c5e4-467f-a118-c899c1428e61 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.017027] 
env[62816]: DEBUG oslo_vmware.api [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2028.017027] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e55168-c790-64a7-895b-aa3b606f93d0" [ 2028.017027] env[62816]: _type = "Task" [ 2028.017027] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.027123] env[62816]: DEBUG oslo_vmware.api [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e55168-c790-64a7-895b-aa3b606f93d0, 'name': SearchDatastore_Task, 'duration_secs': 0.006974} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.027494] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.027732] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.121134] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2028.121442] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2028.121609] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.121798] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2028.121947] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.122117] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2028.122403] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2028.122686] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2028.122921] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2028.123223] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2028.123508] env[62816]: DEBUG nova.virt.hardware [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2028.124399] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23a0e76-c021-4d66-85e1-8107bb3ff053 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.132438] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00ed9f5-0386-4dc5-ace0-59086541b597 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.147349] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:58:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc91c14a-1b97-4e8b-a1e2-742e0a39b089', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2028.154777] env[62816]: DEBUG oslo.service.loopingcall [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.155084] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2028.155342] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abf34935-f8a3-4b7a-ad03-28e04ee80186 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.175159] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2028.175159] env[62816]: value = "task-1789381" [ 2028.175159] env[62816]: _type = "Task" [ 2028.175159] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.183509] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789381, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.632211] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e83333-2f11-4ac7-b6f9-ddda239130c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.640140] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f054cf2d-32bc-4414-aa28-d70621449a78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.669646] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60f1bff-b3fe-48d5-919e-39db1a525050 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.679927] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf9a316-6d99-4290-8dc3-65d489d5b57f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.688211] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789381, 'name': CreateVM_Task, 'duration_secs': 0.37339} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.695873] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2028.696389] env[62816]: DEBUG nova.compute.provider_tree [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2028.698167] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2028.698343] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2028.698674] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2028.699168] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90c18043-2d7d-4036-9504-a28ef25f62f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.703557] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2028.703557] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fce02e-4047-1234-5727-6cec74ab4e6e" [ 2028.703557] env[62816]: _type = "Task" [ 2028.703557] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.711294] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fce02e-4047-1234-5727-6cec74ab4e6e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.200441] env[62816]: DEBUG nova.scheduler.client.report [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2029.214743] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fce02e-4047-1234-5727-6cec74ab4e6e, 'name': SearchDatastore_Task, 'duration_secs': 0.014284} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.215067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.215301] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2029.215585] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.215718] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.215911] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2029.216803] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc6a656f-e57f-4156-9c49-3462c25d954b 
{{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.225926] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2029.226165] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2029.227269] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8115f55b-4ddf-4783-bca5-0a0551ac44bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.232251] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2029.232251] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522f80a8-9c58-5985-6ab3-81abd8fb17f9" [ 2029.232251] env[62816]: _type = "Task" [ 2029.232251] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.239942] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522f80a8-9c58-5985-6ab3-81abd8fb17f9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.742678] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522f80a8-9c58-5985-6ab3-81abd8fb17f9, 'name': SearchDatastore_Task, 'duration_secs': 0.021252} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.743445] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6115a5f-62eb-494d-8470-57fa3ccd843e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.748501] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2029.748501] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520e29ab-0642-fca2-8541-2952c00badac" [ 2029.748501] env[62816]: _type = "Task" [ 2029.748501] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.756071] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520e29ab-0642-fca2-8541-2952c00badac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.210756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.183s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.260429] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520e29ab-0642-fca2-8541-2952c00badac, 'name': SearchDatastore_Task, 'duration_secs': 0.039648} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.260748] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.261084] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2030.261379] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b286325-d019-4de8-aa64-efc6eae22543 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.269517] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2030.269517] env[62816]: value = "task-1789383" [ 2030.269517] env[62816]: _type = "Task" [ 2030.269517] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.279704] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789383, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.769260] env[62816]: INFO nova.scheduler.client.report [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted allocation for migration ce4b779a-2a8b-4ea5-8857-e07b4249145a [ 2030.782358] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789383, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.281960] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a8ab7d6-f772-4484-b133-dce914715edf tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.102s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.283855] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789383, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.668332} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.284383] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2031.284640] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2031.284936] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4dd05a4b-ad48-4f5f-a459-d2fd71c921d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.291669] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2031.291669] env[62816]: value = "task-1789384" [ 2031.291669] env[62816]: _type = "Task" [ 2031.291669] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.299549] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789384, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.597445] env[62816]: INFO nova.compute.manager [None req-c39c8aa9-32ae-412f-bdb5-a5c9c744d039 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Get console output [ 2031.597691] env[62816]: WARNING nova.virt.vmwareapi.driver [None req-c39c8aa9-32ae-412f-bdb5-a5c9c744d039 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] The console log is missing. Check your VSPC configuration [ 2031.804082] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789384, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06501} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2031.804446] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2031.805113] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1517aaf-c7f4-4151-b168-dc761b415e0a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.827183] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2031.827658] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a23c6e4b-d10f-4433-87f0-b79a0b625ec6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.847429] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2031.847429] env[62816]: value = "task-1789385" [ 2031.847429] env[62816]: _type = "Task" [ 2031.847429] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.855412] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789385, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.357384] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789385, 'name': ReconfigVM_Task, 'duration_secs': 0.422147} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.357631] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4/152b3bdd-82d6-4c8a-9fbd-b220d212cfb4.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2032.358298] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8260d10d-dce0-4353-a073-b6084b0cfd28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.364134] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2032.364134] env[62816]: value = "task-1789386" [ 2032.364134] env[62816]: _type = "Task" [ 2032.364134] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.372677] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789386, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.874091] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789386, 'name': Rename_Task, 'duration_secs': 0.167747} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.874448] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2032.874688] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6e07ae5-3662-4b36-8ff4-b09fe5fb9526 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.881138] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2032.881138] env[62816]: value = "task-1789387" [ 2032.881138] env[62816]: _type = "Task" [ 2032.881138] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.888859] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789387, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.395381] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789387, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.895259] env[62816]: DEBUG oslo_vmware.api [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789387, 'name': PowerOnVM_Task, 'duration_secs': 0.736439} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.895804] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2033.895965] env[62816]: DEBUG nova.compute.manager [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2033.897205] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-770079e0-e1a6-4b4d-8474-24083249875a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.417568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.417568] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.417568] env[62816]: DEBUG nova.objects.instance [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62816) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2035.427835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-73d08c3b-7242-4142-8be6-d89bee2a6bf9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.429238] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.429451] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.429651] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.429905] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.430130] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.432068] env[62816]: INFO nova.compute.manager [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Terminating instance [ 2035.433950] env[62816]: DEBUG nova.compute.manager [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2035.434183] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2035.435099] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eab974-af6a-4089-b5f9-3e9c95a771e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.443444] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2035.444337] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1548f3e5-d61d-45f6-ae57-4978eb60f481 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.451363] env[62816]: DEBUG oslo_vmware.api [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2035.451363] env[62816]: value = "task-1789388" [ 2035.451363] env[62816]: _type = "Task" [ 2035.451363] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.460159] env[62816]: DEBUG oslo_vmware.api [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789388, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.897752] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5208c97c-5997-af43-4666-2be24da91637/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2035.898660] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35175d87-f938-496e-a72c-4cb1451a0c5e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.908643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2035.908871] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2035.909902] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5208c97c-5997-af43-4666-2be24da91637/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2035.910087] env[62816]: ERROR oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5208c97c-5997-af43-4666-2be24da91637/disk-0.vmdk due to incomplete transfer. [ 2035.910472] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e12ef1ff-2c40-46c4-a325-b652cfc020cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.918184] env[62816]: DEBUG oslo_vmware.rw_handles [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5208c97c-5997-af43-4666-2be24da91637/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2035.918378] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Uploaded image ac302d8c-1510-4114-9e3c-88835ec9c8e9 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2035.920617] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2035.921339] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ebb1b354-4d8a-4788-8464-f578c21de485 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.927038] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2035.927038] env[62816]: value = "task-1789389" [ 2035.927038] env[62816]: _type = "Task" [ 2035.927038] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.934447] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789389, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.960778] env[62816]: DEBUG oslo_vmware.api [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789388, 'name': PowerOffVM_Task, 'duration_secs': 0.193279} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.961041] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2035.961221] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2035.961461] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d9fd520f-07f1-4257-afd3-a8a9712f1e9e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.042426] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2036.042649] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2036.042832] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2036.043114] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66148490-70ff-4b18-966b-8428a74ac5c1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.049312] env[62816]: DEBUG oslo_vmware.api [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2036.049312] env[62816]: value = "task-1789391" [ 2036.049312] env[62816]: _type = "Task" [ 2036.049312] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.057344] env[62816]: DEBUG oslo_vmware.api [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789391, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.411401] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2036.437527] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789389, 'name': Destroy_Task} progress is 33%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.559082] env[62816]: DEBUG oslo_vmware.api [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789391, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318965} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.559338] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2036.559525] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2036.559703] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2036.559881] env[62816]: INFO nova.compute.manager [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2036.560138] env[62816]: DEBUG oslo.service.loopingcall [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.560336] env[62816]: DEBUG nova.compute.manager [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2036.560431] env[62816]: DEBUG nova.network.neutron [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2036.827711] env[62816]: DEBUG nova.compute.manager [req-a57bf803-c084-40c0-930d-a8ab42a65155 req-4fc8c9c0-d462-4ab0-aa49-0964a70cf501 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Received event network-vif-deleted-dc91c14a-1b97-4e8b-a1e2-742e0a39b089 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2036.827711] env[62816]: INFO nova.compute.manager [req-a57bf803-c084-40c0-930d-a8ab42a65155 req-4fc8c9c0-d462-4ab0-aa49-0964a70cf501 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Neutron deleted interface dc91c14a-1b97-4e8b-a1e2-742e0a39b089; detaching it from the instance and deleting it from the info cache [ 2036.827884] env[62816]: DEBUG nova.network.neutron [req-a57bf803-c084-40c0-930d-a8ab42a65155 req-4fc8c9c0-d462-4ab0-aa49-0964a70cf501 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.934482] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.934742] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.936246] env[62816]: INFO nova.compute.claims [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2036.943874] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789389, 'name': Destroy_Task, 'duration_secs': 0.540842} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.944124] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Destroyed the VM [ 2036.944365] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2036.944608] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b729f6c0-b974-4f6b-8b31-23296ee8704e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.951877] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2036.951877] env[62816]: value = "task-1789392" [ 2036.951877] env[62816]: _type = "Task" [ 2036.951877] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.960461] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789392, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.304383] env[62816]: DEBUG nova.network.neutron [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.329941] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b3c19c6-ea51-495a-956e-f3ccd5e723d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.339830] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd0f48e-0e00-4460-8100-b8bc36717d4f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.367023] env[62816]: DEBUG nova.compute.manager [req-a57bf803-c084-40c0-930d-a8ab42a65155 req-4fc8c9c0-d462-4ab0-aa49-0964a70cf501 service nova] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Detach interface failed, port_id=dc91c14a-1b97-4e8b-a1e2-742e0a39b089, reason: Instance 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2037.462793] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789392, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.807947] env[62816]: INFO nova.compute.manager [-] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Took 1.25 seconds to deallocate network for instance. [ 2037.963374] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789392, 'name': RemoveSnapshot_Task, 'duration_secs': 0.539931} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.966064] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2037.966397] env[62816]: DEBUG nova.compute.manager [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2037.967716] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1d832b-9f93-4ddb-996c-6509326f829d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.042713] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09631def-a03a-4760-afe0-94410a3d0079 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.050274] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7144e77-4373-447b-b00b-6e3c041c2625 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.081473] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3b3510-35f2-4fa4-b382-4d60a2b2271b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.088784] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1724e593-d24d-4e6c-a87e-ff577dca8f37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.101818] env[62816]: DEBUG nova.compute.provider_tree [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2038.314777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2038.479870] env[62816]: INFO nova.compute.manager [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Shelve offloading [ 2038.481481] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2038.481735] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b5eefae-1f0a-4bdc-9866-e984312097cc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.489118] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2038.489118] env[62816]: value = "task-1789393" [ 2038.489118] env[62816]: _type = "Task" [ 2038.489118] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.496453] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789393, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.604765] env[62816]: DEBUG nova.scheduler.client.report [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2038.999922] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2039.000110] env[62816]: DEBUG nova.compute.manager [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2039.000829] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c8968e-f861-4788-9689-6dfce4dc7952 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.006143] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.006310] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.006482] env[62816]: DEBUG nova.network.neutron [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2039.109697] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.175s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.110246] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2039.112668] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.798s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2039.113310] env[62816]: DEBUG nova.objects.instance [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'resources' on Instance uuid 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2039.616031] env[62816]: DEBUG nova.compute.utils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2039.620489] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2039.620671] env[62816]: DEBUG nova.network.neutron [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2039.661053] env[62816]: DEBUG nova.policy [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f53618eedbd4be28d440e1cbd81a8fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53b24724dc3344f0b4206a015e34f2e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2039.697856] env[62816]: DEBUG nova.network.neutron [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updating instance_info_cache with network_info: [{"id": "a93b8888-3507-409d-afe6-ddc568a2c790", "address": "fa:16:3e:bc:b1:9a", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa93b8888-35", "ovs_interfaceid": "a93b8888-3507-409d-afe6-ddc568a2c790", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2039.712346] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd54f14-7dd4-4aa3-acff-582430648df9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.719812] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91d53d7-ccd7-4343-a4cb-530655c54bde {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.749701] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33146b0d-2846-491d-a83f-9a0964009edd {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.756507] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d703702-be9a-4630-85cc-b807ec993dc4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.769419] env[62816]: DEBUG nova.compute.provider_tree [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2039.897974] env[62816]: DEBUG nova.network.neutron [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Successfully created port: aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2040.121153] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2040.200916] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.272921] env[62816]: DEBUG nova.scheduler.client.report [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2040.421142] env[62816]: DEBUG nova.compute.manager [req-f09f9fa1-7e9c-4e7b-a640-d42bccf2288b req-54a0f5d7-1fe4-43cf-a565-3ccaf29b9547 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Received event network-vif-unplugged-a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2040.421142] env[62816]: DEBUG oslo_concurrency.lockutils [req-f09f9fa1-7e9c-4e7b-a640-d42bccf2288b req-54a0f5d7-1fe4-43cf-a565-3ccaf29b9547 service nova] Acquiring lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2040.421142] env[62816]: DEBUG oslo_concurrency.lockutils [req-f09f9fa1-7e9c-4e7b-a640-d42bccf2288b 
req-54a0f5d7-1fe4-43cf-a565-3ccaf29b9547 service nova] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.421142] env[62816]: DEBUG oslo_concurrency.lockutils [req-f09f9fa1-7e9c-4e7b-a640-d42bccf2288b req-54a0f5d7-1fe4-43cf-a565-3ccaf29b9547 service nova] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.421766] env[62816]: DEBUG nova.compute.manager [req-f09f9fa1-7e9c-4e7b-a640-d42bccf2288b req-54a0f5d7-1fe4-43cf-a565-3ccaf29b9547 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] No waiting events found dispatching network-vif-unplugged-a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2040.422120] env[62816]: WARNING nova.compute.manager [req-f09f9fa1-7e9c-4e7b-a640-d42bccf2288b req-54a0f5d7-1fe4-43cf-a565-3ccaf29b9547 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Received unexpected event network-vif-unplugged-a93b8888-3507-409d-afe6-ddc568a2c790 for instance with vm_state shelved and task_state shelving_offloading. [ 2040.443103] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2040.443103] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f5dc51-7fea-49b9-ae8e-1eafa25cbb0e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.451017] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2040.451017] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59cadb92-6b49-44ec-abc9-02ffa6686716 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.532020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2040.532020] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2040.532020] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 
tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleting the datastore file [datastore1] b139cd0e-b827-4521-b8e1-8fe5303ed596 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2040.532020] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5b5f901-35d5-45b3-a5f2-87e658aaaf12 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.537850] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2040.537850] env[62816]: value = "task-1789395" [ 2040.537850] env[62816]: _type = "Task" [ 2040.537850] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.550019] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789395, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.779062] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.665s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.802348] env[62816]: INFO nova.scheduler.client.report [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocations for instance 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4 [ 2041.048164] env[62816]: DEBUG oslo_vmware.api [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789395, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140869} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.048379] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2041.048551] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2041.048730] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2041.065080] env[62816]: INFO nova.scheduler.client.report [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocations for instance b139cd0e-b827-4521-b8e1-8fe5303ed596 [ 2041.135477] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2041.160505] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2041.160763] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2041.160925] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2041.161139] env[62816]: DEBUG 
nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2041.161276] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2041.161421] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2041.161628] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2041.161789] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2041.161961] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2041.162208] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2041.162311] env[62816]: DEBUG nova.virt.hardware [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2041.163488] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b19d2e-1221-4049-a52f-71de4f7b8889 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.171139] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d363190-1c62-4079-8a34-9352e3515176 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.262723] env[62816]: DEBUG nova.compute.manager [req-6979aa1a-8163-4560-8849-a9461a4d46f9 req-d310eb58-7512-4b4f-8655-0ef231df8899 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Received event 
network-vif-plugged-aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2041.262954] env[62816]: DEBUG oslo_concurrency.lockutils [req-6979aa1a-8163-4560-8849-a9461a4d46f9 req-d310eb58-7512-4b4f-8655-0ef231df8899 service nova] Acquiring lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.263196] env[62816]: DEBUG oslo_concurrency.lockutils [req-6979aa1a-8163-4560-8849-a9461a4d46f9 req-d310eb58-7512-4b4f-8655-0ef231df8899 service nova] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.263364] env[62816]: DEBUG oslo_concurrency.lockutils [req-6979aa1a-8163-4560-8849-a9461a4d46f9 req-d310eb58-7512-4b4f-8655-0ef231df8899 service nova] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.263534] env[62816]: DEBUG nova.compute.manager [req-6979aa1a-8163-4560-8849-a9461a4d46f9 req-d310eb58-7512-4b4f-8655-0ef231df8899 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] No waiting events found dispatching network-vif-plugged-aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2041.263690] env[62816]: WARNING nova.compute.manager [req-6979aa1a-8163-4560-8849-a9461a4d46f9 req-d310eb58-7512-4b4f-8655-0ef231df8899 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Received unexpected event network-vif-plugged-aa1cc566-fcd4-44bc-a585-ddab737b1a55 for instance with vm_state building and task_state spawning. 
[ 2041.310445] env[62816]: DEBUG oslo_concurrency.lockutils [None req-2e1b6a9b-be01-4c65-a9fd-447f8047d960 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "152b3bdd-82d6-4c8a-9fbd-b220d212cfb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.881s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2041.352069] env[62816]: DEBUG nova.network.neutron [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Successfully updated port: aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2041.570170] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.570501] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.570745] env[62816]: DEBUG nova.objects.instance [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'resources' on Instance uuid b139cd0e-b827-4521-b8e1-8fe5303ed596 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2041.857567] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.857835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.857985] env[62816]: DEBUG nova.network.neutron [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2042.073437] env[62816]: DEBUG nova.objects.instance [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'numa_topology' on Instance uuid b139cd0e-b827-4521-b8e1-8fe5303ed596 {{(pid=62816) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2042.398864] env[62816]: DEBUG nova.network.neutron [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2042.414451] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.449767] env[62816]: DEBUG nova.compute.manager [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Received event network-changed-a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2042.449972] env[62816]: DEBUG nova.compute.manager [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Refreshing instance network info cache due to event network-changed-a93b8888-3507-409d-afe6-ddc568a2c790. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2042.450240] env[62816]: DEBUG oslo_concurrency.lockutils [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] Acquiring lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2042.450325] env[62816]: DEBUG oslo_concurrency.lockutils [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] Acquired lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2042.450487] env[62816]: DEBUG nova.network.neutron [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Refreshing network info cache for port a93b8888-3507-409d-afe6-ddc568a2c790 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2042.525301] env[62816]: DEBUG nova.network.neutron [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [{"id": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "address": "fa:16:3e:ce:a1:cc", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1cc566-fc", "ovs_interfaceid": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.575702] env[62816]: DEBUG nova.objects.base [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2042.648155] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34496473-83b1-4e86-8802-c0027550ca1f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.655704] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d45820-5b03-4c4a-b684-44115788b1d0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.684953] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08caaf43-a6c4-4cdd-a40e-6156329944c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.691627] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a545c8fd-0565-4144-ba40-b087ebcac218 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.704779] env[62816]: DEBUG nova.compute.provider_tree [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2043.028100] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.028428] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Instance network_info: |[{"id": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "address": "fa:16:3e:ce:a1:cc", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": 
"fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1cc566-fc", "ovs_interfaceid": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2043.028839] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:a1:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'aa1cc566-fcd4-44bc-a585-ddab737b1a55', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2043.036372] env[62816]: DEBUG oslo.service.loopingcall [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2043.038582] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2043.038825] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94b2c1c7-4dfd-4d37-aa37-f9465d95f16b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.060098] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2043.060098] env[62816]: value = "task-1789396" [ 2043.060098] env[62816]: _type = "Task" [ 2043.060098] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.067504] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789396, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.169425] env[62816]: DEBUG nova.network.neutron [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updated VIF entry in instance network info cache for port a93b8888-3507-409d-afe6-ddc568a2c790. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2043.169952] env[62816]: DEBUG nova.network.neutron [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updating instance_info_cache with network_info: [{"id": "a93b8888-3507-409d-afe6-ddc568a2c790", "address": "fa:16:3e:bc:b1:9a", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": null, "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa93b8888-35", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2043.208136] env[62816]: DEBUG nova.scheduler.client.report [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2043.290708] env[62816]: DEBUG nova.compute.manager [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Received event network-changed-aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2043.290916] env[62816]: DEBUG nova.compute.manager [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Refreshing instance network info cache due to event network-changed-aa1cc566-fcd4-44bc-a585-ddab737b1a55. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2043.291124] env[62816]: DEBUG oslo_concurrency.lockutils [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] Acquiring lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2043.291219] env[62816]: DEBUG oslo_concurrency.lockutils [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] Acquired lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2043.291344] env[62816]: DEBUG nova.network.neutron [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Refreshing network info cache for port aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2043.569390] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789396, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.672683] env[62816]: DEBUG oslo_concurrency.lockutils [req-29aacddd-c6e5-41bf-a5a3-445634ad7b00 req-b34b4aeb-06a9-4172-8a76-08f1a044f9d0 service nova] Releasing lock "refresh_cache-b139cd0e-b827-4521-b8e1-8fe5303ed596" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2043.713968] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.973190] env[62816]: DEBUG nova.network.neutron [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updated VIF entry in instance network info cache for port aa1cc566-fcd4-44bc-a585-ddab737b1a55. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2043.973668] env[62816]: DEBUG nova.network.neutron [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [{"id": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "address": "fa:16:3e:ce:a1:cc", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1cc566-fc", "ovs_interfaceid": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2044.070190] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789396, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.222369] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e3c8cd22-f481-47c0-8839-7cec96e861c8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.386s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.223290] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.809s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.223519] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.223879] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.223958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.225488] env[62816]: INFO nova.compute.manager [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Terminating instance [ 2044.227042] env[62816]: DEBUG nova.compute.manager [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2044.227250] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2044.227502] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9ee04ab-d017-48cc-b8f7-cfaf4ae94773 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.237611] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3200b6ed-1cd8-4679-a1fb-55d338b22881 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.264907] env[62816]: WARNING nova.virt.vmwareapi.vmops [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b139cd0e-b827-4521-b8e1-8fe5303ed596 could not be found. [ 2044.265115] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2044.265290] env[62816]: INFO nova.compute.manager [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2044.265522] env[62816]: DEBUG oslo.service.loopingcall [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2044.265968] env[62816]: DEBUG nova.compute.manager [-] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2044.266084] env[62816]: DEBUG nova.network.neutron [-] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2044.476059] env[62816]: DEBUG oslo_concurrency.lockutils [req-7b292aaf-2206-4792-9b11-111b9abf68f7 req-0053d529-18a1-4e94-9b7e-faea3bc9b815 service nova] Releasing lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2044.570630] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789396, 'name': CreateVM_Task, 'duration_secs': 1.323305} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.570893] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2044.571471] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2044.571643] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2044.571962] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2044.572223] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0853eb14-49d7-4314-8a87-ff48dd484f96 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.576465] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2044.576465] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ccea58-050a-a171-ab67-9a4516b5ecaf" [ 
2044.576465] env[62816]: _type = "Task" [ 2044.576465] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.584735] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ccea58-050a-a171-ab67-9a4516b5ecaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.952252] env[62816]: DEBUG nova.network.neutron [-] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.087253] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ccea58-050a-a171-ab67-9a4516b5ecaf, 'name': SearchDatastore_Task, 'duration_secs': 0.010271} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.087724] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2045.088010] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2045.088356] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2045.088582] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2045.088848] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2045.089178] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-78e507b8-11a4-40f4-a5a5-38493cc0aaad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.097340] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2045.097521] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2045.098212] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e858a909-d395-42bc-ae71-01a8b518fed8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.103213] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2045.103213] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52223cc4-82aa-526e-2f8e-61fa8e967233" [ 2045.103213] env[62816]: _type = "Task" [ 2045.103213] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.110329] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52223cc4-82aa-526e-2f8e-61fa8e967233, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.454514] env[62816]: INFO nova.compute.manager [-] [instance: b139cd0e-b827-4521-b8e1-8fe5303ed596] Took 1.19 seconds to deallocate network for instance. [ 2045.614365] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52223cc4-82aa-526e-2f8e-61fa8e967233, 'name': SearchDatastore_Task, 'duration_secs': 0.008471} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.615279] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d0bf6f3-4406-47e9-9eef-f0f41fbfad5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.620520] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2045.620520] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5290e9c8-d141-f82b-5fdb-f36662de46f8" [ 2045.620520] env[62816]: _type = "Task" [ 2045.620520] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.628361] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5290e9c8-d141-f82b-5fdb-f36662de46f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.130698] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5290e9c8-d141-f82b-5fdb-f36662de46f8, 'name': SearchDatastore_Task, 'duration_secs': 0.009178} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.130948] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2046.131245] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f/a17c2b1f-47f2-4076-8e99-55e8189e952f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2046.131503] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69aebb83-5dcf-476b-a271-161abe317594 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.138210] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2046.138210] env[62816]: value = "task-1789397" [ 2046.138210] env[62816]: _type = "Task" [ 2046.138210] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.145714] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789397, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.373549] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.373958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.374263] env[62816]: INFO nova.compute.manager [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Shelving [ 2046.479371] env[62816]: DEBUG oslo_concurrency.lockutils [None req-3faa8b4a-654a-43c2-ad6d-ef267ddfff16 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "b139cd0e-b827-4521-b8e1-8fe5303ed596" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.256s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.650793] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789397, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446084} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.651200] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f/a17c2b1f-47f2-4076-8e99-55e8189e952f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2046.651432] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2046.651706] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3679bb0-fa4c-48e9-9305-7c9be9c3941f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.658718] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2046.658718] env[62816]: value = "task-1789398" [ 2046.658718] env[62816]: _type = "Task" [ 2046.658718] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.666749] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789398, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.882300] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2046.882548] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d996d2ab-501f-44b5-9d12-712e1efa3546 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.889657] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2046.889657] env[62816]: value = "task-1789399" [ 2046.889657] env[62816]: _type = "Task" [ 2046.889657] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.897913] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789399, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.169468] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789398, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080847} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.169754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2047.170549] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7772d68-9093-41d4-a621-1c0234deb822 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.192221] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f/a17c2b1f-47f2-4076-8e99-55e8189e952f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2047.192525] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b5f70f6-10cf-4971-a55d-98642968d1ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.211797] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2047.211797] env[62816]: value = "task-1789400" [ 2047.211797] env[62816]: _type = "Task" [ 2047.211797] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.219811] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789400, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.237356] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.237590] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.399206] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789399, 'name': PowerOffVM_Task, 'duration_secs': 0.236517} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.399483] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2047.400270] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83af8d5a-dd82-452f-93d2-b63ec0a79832 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.417839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a698d1c4-7c06-46f1-bb96-38333fd362f7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.724058] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789400, 'name': ReconfigVM_Task, 'duration_secs': 0.248593} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.724219] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Reconfigured VM instance instance-0000006d to attach disk [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f/a17c2b1f-47f2-4076-8e99-55e8189e952f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2047.724684] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c1d524b-1719-4144-990a-ff1fadc4e289 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.731010] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2047.731010] env[62816]: value = "task-1789401" [ 2047.731010] env[62816]: _type = "Task" [ 2047.731010] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.738555] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789401, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.739971] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2047.928230] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Creating Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2047.928552] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ca00bb84-e7f2-4690-87db-3f34c8a4f231 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.936557] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2047.936557] env[62816]: value = "task-1789402" [ 2047.936557] env[62816]: _type = "Task" [ 2047.936557] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.944398] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789402, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.248069] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789401, 'name': Rename_Task, 'duration_secs': 0.139507} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.249487] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2048.249956] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2133bb19-51fd-4abd-ad2d-8d8191eb7769 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.256389] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2048.256389] env[62816]: value = "task-1789403" [ 2048.256389] env[62816]: _type = "Task" [ 2048.256389] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.263804] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789403, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.264777] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.265013] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.266480] env[62816]: INFO nova.compute.claims [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2048.446503] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789402, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.766957] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789403, 'name': PowerOnVM_Task} progress is 78%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.947397] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789402, 'name': CreateSnapshot_Task, 'duration_secs': 0.564768} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.947679] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Created Snapshot of the VM instance {{(pid=62816) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2048.948433] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e16db7-c789-463c-af71-f0ae2afbbf23 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.267774] env[62816]: DEBUG oslo_vmware.api [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789403, 'name': PowerOnVM_Task, 'duration_secs': 0.883503} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.268108] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2049.268259] env[62816]: INFO nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Took 8.13 seconds to spawn the instance on the hypervisor. 
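
The CreateVM/SearchDatastore/CopyVirtualDisk/ExtendVirtualDisk/ReconfigVM/Rename/PowerOnVM entries above all follow the same oslo.vmware pattern: invoke a vSphere method that returns a Task, then poll it ("progress is N%") until it completes. A minimal sketch of that pattern, assuming `session` is an already-established oslo_vmware.api.VMwareAPISession and `vm_ref` a VirtualMachine managed-object reference obtained elsewhere; the helper name is illustrative.

    # Sketch of the task-polling pattern behind the "Task: {...} progress is N%"
    # entries: call a vSphere method that returns a Task, then block on it.
    from oslo_vmware import exceptions as vexc

    def power_on_vm(session, vm_ref):
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        try:
            # wait_for_task polls the task (the "progress is N%" DEBUG lines)
            # and returns its result once it reaches the "success" state.
            return session.wait_for_task(task)
        except vexc.VimException:
            # The task failed on the vCenter side; re-raise for the caller.
            raise
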
[ 2049.268446] env[62816]: DEBUG nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2049.269247] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33db0a5c-3081-4e55-864d-30660214b9c2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.359107] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d56583-330c-4033-baaa-607b5fc233d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.367517] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197323ce-864a-4ae2-842a-a98595743033 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.396648] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad17d6f-af67-4b72-acf1-9efc50be94f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.403511] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae9f31c-131d-40d6-a32c-e4d96ccf541e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.416285] env[62816]: DEBUG nova.compute.provider_tree [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2049.465111] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Creating linked-clone VM from snapshot {{(pid=62816) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2049.465400] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6da2d602-2228-4dfc-9a79-e3775062c9d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.474521] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2049.474521] env[62816]: value = "task-1789404" [ 2049.474521] env[62816]: _type = "Task" [ 2049.474521] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2049.482010] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789404, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2049.789440] env[62816]: INFO nova.compute.manager [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Took 12.87 seconds to build instance. [ 2049.919607] env[62816]: DEBUG nova.scheduler.client.report [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2049.985170] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789404, 'name': CloneVM_Task} progress is 94%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.291730] env[62816]: DEBUG oslo_concurrency.lockutils [None req-23abd52d-a059-471d-829c-e4b4e394da4f tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.383s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.424824] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.160s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.425430] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2050.486208] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789404, 'name': CloneVM_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2050.931057] env[62816]: DEBUG nova.compute.utils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2050.932357] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2050.932533] env[62816]: DEBUG nova.network.neutron [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2050.982087] env[62816]: DEBUG nova.policy [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c5023a18d243b7aafb6d6181f931d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e070d1729247ff83b4ff6997b45385', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2050.987190] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789404, 'name': CloneVM_Task, 'duration_secs': 1.058761} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2050.987490] env[62816]: INFO nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Created linked-clone VM from snapshot [ 2050.988230] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884be663-3655-4d24-81e3-0df96f06ee90 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.995341] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Uploading image 342dc3e0-1510-4bf8-b31a-a9336941a298 {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2051.020142] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2051.020142] env[62816]: value = "vm-371221" [ 2051.020142] env[62816]: _type = "VirtualMachine" [ 2051.020142] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2051.020489] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-755b526d-1921-4cc2-a9df-afc0e2bb0143 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.028015] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease: (returnval){ [ 2051.028015] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c8518f-3ab4-cbc3-a9e5-1b5dc717406c" [ 2051.028015] env[62816]: _type = "HttpNfcLease" [ 2051.028015] env[62816]: } obtained for exporting VM: (result){ [ 2051.028015] env[62816]: value = "vm-371221" [ 2051.028015] env[62816]: _type = "VirtualMachine" [ 2051.028015] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2051.028429] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the lease: (returnval){ [ 2051.028429] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c8518f-3ab4-cbc3-a9e5-1b5dc717406c" [ 2051.028429] env[62816]: _type = "HttpNfcLease" [ 2051.028429] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2051.034727] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2051.034727] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c8518f-3ab4-cbc3-a9e5-1b5dc717406c" [ 2051.034727] env[62816]: _type = "HttpNfcLease" [ 2051.034727] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2051.184758] env[62816]: DEBUG nova.compute.manager [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2051.232211] env[62816]: DEBUG nova.network.neutron [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Successfully created port: 77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2051.435858] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2051.536574] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2051.536574] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c8518f-3ab4-cbc3-a9e5-1b5dc717406c" [ 2051.536574] env[62816]: _type = "HttpNfcLease" [ 2051.536574] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2051.536900] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2051.536900] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52c8518f-3ab4-cbc3-a9e5-1b5dc717406c" [ 2051.536900] env[62816]: _type = "HttpNfcLease" [ 2051.536900] env[62816]: }. {{(pid=62816) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2051.537591] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6d648a-7bf0-4b6c-a400-903d7f1028db {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.544816] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52971013-c4b7-b10f-ebc0-5d15b323bfcf/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2051.545120] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52971013-c4b7-b10f-ebc0-5d15b323bfcf/disk-0.vmdk for reading. 
{{(pid=62816) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2051.640469] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bba17a41-566a-4052-9715-930145ea7d66 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.706245] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.706526] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.213018] env[62816]: INFO nova.compute.claims [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2052.447263] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2052.469961] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2052.470266] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2052.470508] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2052.470775] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2052.470974] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2052.471181] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2052.471503] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2052.471668] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2052.471840] env[62816]: DEBUG nova.virt.hardware [None 
req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2052.472019] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2052.472220] env[62816]: DEBUG nova.virt.hardware [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2052.473350] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc002a3-5d78-4026-9111-ce7c8c377eb6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.481467] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c793d49-99a8-49c3-b50f-a5284dd4ff55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.627098] env[62816]: DEBUG nova.compute.manager [req-38ba7e3c-3c4b-4860-9bcf-1282b68ec6bf req-9d2abe46-8616-49c0-b8ea-72801fb1062a service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Received event network-vif-plugged-77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2052.627281] env[62816]: DEBUG oslo_concurrency.lockutils [req-38ba7e3c-3c4b-4860-9bcf-1282b68ec6bf req-9d2abe46-8616-49c0-b8ea-72801fb1062a service nova] Acquiring lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.627493] env[62816]: DEBUG oslo_concurrency.lockutils [req-38ba7e3c-3c4b-4860-9bcf-1282b68ec6bf req-9d2abe46-8616-49c0-b8ea-72801fb1062a service nova] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.627664] env[62816]: DEBUG oslo_concurrency.lockutils [req-38ba7e3c-3c4b-4860-9bcf-1282b68ec6bf req-9d2abe46-8616-49c0-b8ea-72801fb1062a service nova] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.627833] env[62816]: DEBUG nova.compute.manager [req-38ba7e3c-3c4b-4860-9bcf-1282b68ec6bf req-9d2abe46-8616-49c0-b8ea-72801fb1062a service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] No waiting events found dispatching network-vif-plugged-77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2052.627995] env[62816]: WARNING nova.compute.manager 
[req-38ba7e3c-3c4b-4860-9bcf-1282b68ec6bf req-9d2abe46-8616-49c0-b8ea-72801fb1062a service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Received unexpected event network-vif-plugged-77c83104-69e7-4415-b48f-1e9d34cca8e9 for instance with vm_state building and task_state spawning. [ 2052.719679] env[62816]: INFO nova.compute.resource_tracker [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating resource usage from migration 94c84fb6-40d1-4e65-8581-b5174a4776a2 [ 2052.729114] env[62816]: DEBUG nova.network.neutron [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Successfully updated port: 77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2052.824493] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-398abf89-0214-4749-89aa-1d98d0aaa45a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.832287] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf040ab8-e4cf-40b1-bee9-ca8f78810a55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.862571] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efc3c5aa-b73b-452f-ab64-17f10009d9c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.870241] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c1c4af-1a62-4ea5-b976-6397d167b4ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2052.883804] env[62816]: DEBUG nova.compute.provider_tree [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2053.232239] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2053.232450] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2053.232715] env[62816]: DEBUG nova.network.neutron [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Building network info cache for instance {{(pid=62816) 
_get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2053.387186] env[62816]: DEBUG nova.scheduler.client.report [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2053.789204] env[62816]: DEBUG nova.network.neutron [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2053.892134] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.185s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.892393] env[62816]: INFO nova.compute.manager [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Migrating [ 2053.956847] env[62816]: DEBUG nova.network.neutron [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Updating instance_info_cache with network_info: [{"id": "77c83104-69e7-4415-b48f-1e9d34cca8e9", "address": "fa:16:3e:dc:7d:5d", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77c83104-69", "ovs_interfaceid": "77c83104-69e7-4415-b48f-1e9d34cca8e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2054.407727] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.407935] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.408226] env[62816]: DEBUG nova.network.neutron [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2054.459366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.459699] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Instance network_info: |[{"id": "77c83104-69e7-4415-b48f-1e9d34cca8e9", "address": "fa:16:3e:dc:7d:5d", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77c83104-69", "ovs_interfaceid": "77c83104-69e7-4415-b48f-1e9d34cca8e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2054.460217] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:7d:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77c83104-69e7-4415-b48f-1e9d34cca8e9', 'vif_model': 
'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2054.468163] env[62816]: DEBUG oslo.service.loopingcall [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2054.468438] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2054.468676] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3a2999d0-3004-4525-b338-fdd7463dba7d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.489021] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2054.489021] env[62816]: value = "task-1789406" [ 2054.489021] env[62816]: _type = "Task" [ 2054.489021] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.497186] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789406, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.715957] env[62816]: DEBUG nova.compute.manager [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Received event network-changed-77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2054.716228] env[62816]: DEBUG nova.compute.manager [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Refreshing instance network info cache due to event network-changed-77c83104-69e7-4415-b48f-1e9d34cca8e9. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2054.716453] env[62816]: DEBUG oslo_concurrency.lockutils [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] Acquiring lock "refresh_cache-1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.716601] env[62816]: DEBUG oslo_concurrency.lockutils [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] Acquired lock "refresh_cache-1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.716767] env[62816]: DEBUG nova.network.neutron [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Refreshing network info cache for port 77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2054.997950] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789406, 'name': CreateVM_Task, 'duration_secs': 0.427405} completed successfully. 
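The scheduler report above for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa logs the placement inventory as plain dicts; for each resource class the schedulable capacity is (total - reserved) * allocation_ratio. A small worked example using the logged values (just the arithmetic, not Nova or placement code):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inv):
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}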
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.998262] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2054.998754] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2054.998924] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2054.999266] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2054.999520] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5c25328-02db-4c45-926c-fcaa21080914 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.003742] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2055.003742] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52617cfe-78dc-9862-9fa8-bd5822a223aa" [ 2055.003742] env[62816]: _type = "Task" [ 2055.003742] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.012781] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52617cfe-78dc-9862-9fa8-bd5822a223aa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.100548] env[62816]: DEBUG nova.network.neutron [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [{"id": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "address": "fa:16:3e:ce:a1:cc", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1cc566-fc", "ovs_interfaceid": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.459264] env[62816]: DEBUG nova.network.neutron [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Updated VIF entry in instance network info cache for port 77c83104-69e7-4415-b48f-1e9d34cca8e9. 
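The "Received unexpected event network-vif-plugged-..." warning earlier and the "Received event network-changed-... Refreshing instance network info cache" entries above both come from the compute manager's external instance event handling: Neutron notifies Nova about a port, and the manager either pops a waiter that was registered for that event or, if none exists, logs the event as unexpected. A hypothetical, heavily simplified stand-in for that registry, keyed the same way the log keys its events ("<event>-<tag>"); this is not the real InstanceEvents class:

    class InstanceEventsSketch:
        def __init__(self):
            self._waiters = {}   # instance_uuid -> {event_key: callback}

        def prepare(self, instance_uuid, event_key, callback):
            # Registered before the operation that expects the event (e.g. plugging a VIF).
            self._waiters.setdefault(instance_uuid, {})[event_key] = callback

        def pop(self, instance_uuid, event_key):
            cb = self._waiters.get(instance_uuid, {}).pop(event_key, None)
            if cb is None:
                # Corresponds to the "Received unexpected event ..." warning path above.
                print(f"unexpected event {event_key} for {instance_uuid}")
            else:
                cb()

    events = InstanceEventsSketch()
    events.pop("1c9dbe62-e1a0-429d-a22d-4ce8c51a8430",
               "network-vif-plugged-77c83104-69e7-4415-b48f-1e9d34cca8e9")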
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2055.459688] env[62816]: DEBUG nova.network.neutron [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Updating instance_info_cache with network_info: [{"id": "77c83104-69e7-4415-b48f-1e9d34cca8e9", "address": "fa:16:3e:dc:7d:5d", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77c83104-69", "ovs_interfaceid": "77c83104-69e7-4415-b48f-1e9d34cca8e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2055.515050] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52617cfe-78dc-9862-9fa8-bd5822a223aa, 'name': SearchDatastore_Task, 'duration_secs': 0.009332} completed successfully. 
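In the instance_info_cache entries above, port 77c83104-69e7-4415-b48f-1e9d34cca8e9 is reported with devname "tap77c83104-69": the Linux tap device name is the "tap" prefix plus the start of the port UUID, truncated to the kernel interface-name limit. A one-liner reproducing the value from the log (the 14-character cap is the usual IFNAMSIZ-derived convention, stated here as an assumption rather than quoted from Nova):

    def tap_device_name(port_id: str, prefix: str = "tap", max_len: int = 14) -> str:
        # "tap" + "77c83104-69e7-..." truncated -> "tap77c83104-69"
        return (prefix + port_id)[:max_len]

    assert tap_device_name("77c83104-69e7-4415-b48f-1e9d34cca8e9") == "tap77c83104-69"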
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.515333] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.515570] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2055.515808] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2055.515983] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2055.516381] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2055.516697] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e653b76-8599-44e5-89ff-9a85a7a0d12b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.525318] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2055.525511] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2055.526286] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af937714-d756-42ea-8d69-0bcc4b902122 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.531333] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2055.531333] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f37e87-238c-362c-4400-6047dec4497d" [ 2055.531333] env[62816]: _type = "Task" [ 2055.531333] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2055.539590] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f37e87-238c-362c-4400-6047dec4497d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.603141] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2055.962380] env[62816]: DEBUG oslo_concurrency.lockutils [req-391ff7ce-2548-466b-baf4-51ec5e432b57 req-a4dd44b0-e919-45e4-826d-467fe2cfafec service nova] Releasing lock "refresh_cache-1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.042960] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f37e87-238c-362c-4400-6047dec4497d, 'name': SearchDatastore_Task, 'duration_secs': 0.008456} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.043804] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3185dc6-4ef4-4211-9c38-b1df0101f56c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.049297] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2056.049297] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fa48e5-c106-82c9-76cc-160a6fa210da" [ 2056.049297] env[62816]: _type = "Task" [ 2056.049297] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.057474] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fa48e5-c106-82c9-76cc-160a6fa210da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.559790] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fa48e5-c106-82c9-76cc-160a6fa210da, 'name': SearchDatastore_Task, 'duration_secs': 0.009627} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2056.560068] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2056.560332] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430/1c9dbe62-e1a0-429d-a22d-4ce8c51a8430.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2056.560603] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e2fff40-541c-482d-8c3d-d3987e6787ef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2056.567268] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2056.567268] env[62816]: value = "task-1789407" [ 2056.567268] env[62816]: _type = "Task" [ 2056.567268] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2056.575262] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.078749] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789407, 'name': CopyVirtualDisk_Task} progress is 77%. 
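The CopyVirtualDisk_Task above copies from the shared image cache to a per-instance folder on the same datastore: [datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk becomes [datastore1] <instance-uuid>/<instance-uuid>.vmdk, so later boots of the same image can skip the Glance download. A sketch of how those two datastore paths are put together; this is plain string construction matching the logged paths, not the vmwareapi ds_util helpers:

    def cached_image_path(datastore: str, image_id: str,
                          cache_dir: str = "devstack-image-cache_base") -> str:
        return f"[{datastore}] {cache_dir}/{image_id}/{image_id}.vmdk"

    def instance_disk_path(datastore: str, instance_uuid: str) -> str:
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    src = cached_image_path("datastore1", "844838ed-b150-482e-a0f6-dcce37470b52")
    dst = instance_disk_path("datastore1", "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430")
    # src and dst match the CopyVirtualDisk_Task source and destination logged above.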
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.117850] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870a9198-c986-4167-86d2-bca2ae1f4e02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.135726] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2057.578174] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789407, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.542835} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.578524] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430/1c9dbe62-e1a0-429d-a22d-4ce8c51a8430.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2057.578783] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2057.579109] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-398b98c2-7fa3-41d3-9a87-23c10636859d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.586422] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2057.586422] env[62816]: value = "task-1789408" [ 2057.586422] env[62816]: _type = "Task" [ 2057.586422] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.595615] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789408, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.642055] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2057.642055] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-34335342-cca3-4ca7-98c0-963709fe1c2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.648703] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2057.648703] env[62816]: value = "task-1789409" [ 2057.648703] env[62816]: _type = "Task" [ 2057.648703] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.657618] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789409, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.096424] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.094496} completed successfully. 
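"Extending root virtual disk to 1048576" above is the m1.nano flavor's root_gb=1 expressed in KB, the unit the extend task works in: the cirros image copied from the cache is only ~21 MB (size=21318656 in the image meta earlier), so the vmdk is grown to the flavor's root disk size. The conversion, using the values from the log:

    root_gb = 1                                # m1.nano root_gb from the flavor logged above
    requested_size_kb = root_gb * 1024 * 1024  # GiB -> KiB
    assert requested_size_kb == 1048576        # matches "Extending root virtual disk to 1048576"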
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.096903] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2058.097746] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4d5f02a-fac6-48f3-825d-7a6e7e856c77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.120314] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430/1c9dbe62-e1a0-429d-a22d-4ce8c51a8430.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2058.120706] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0af76a0c-0455-4b42-a2fc-70347df99bc5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.141024] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2058.141024] env[62816]: value = "task-1789410" [ 2058.141024] env[62816]: _type = "Task" [ 2058.141024] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.151385] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789410, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.159408] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789409, 'name': PowerOffVM_Task, 'duration_secs': 0.227233} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.159675] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2058.159861] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2058.524860] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.525106] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.525333] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2058.525522] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.525714] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.528017] env[62816]: INFO nova.compute.manager [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Terminating instance [ 2058.529992] env[62816]: DEBUG nova.compute.manager [None 
req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2058.530228] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2058.530465] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37ea7877-6b28-4093-83c4-72317d4096b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.537564] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2058.537564] env[62816]: value = "task-1789411" [ 2058.537564] env[62816]: _type = "Task" [ 2058.537564] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.547041] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789411, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.652225] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789410, 'name': ReconfigVM_Task, 'duration_secs': 0.29236} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.652670] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430/1c9dbe62-e1a0-429d-a22d-4ce8c51a8430.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2058.653308] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c340c675-7257-47ff-ae6e-537afc3b6b38 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.660020] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2058.660020] env[62816]: value = "task-1789412" [ 2058.660020] env[62816]: _type = "Task" [ 2058.660020] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.666425] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2058.666677] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2058.666813] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2058.667016] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2058.667223] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2058.667382] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2058.667593] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2058.667766] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2058.667922] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2058.668089] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2058.668281] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2058.677846] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23d17286-d049-45e6-a982-a1b675c5ef1a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.689113] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789412, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.694295] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2058.694295] env[62816]: value = "task-1789413" [ 2058.694295] env[62816]: _type = "Task" [ 2058.694295] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.704638] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789413, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.048206] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789411, 'name': PowerOffVM_Task, 'duration_secs': 0.196965} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.048458] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2059.048661] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Volume detach. 
Driver type: vmdk {{(pid=62816) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2059.048858] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371204', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'name': 'volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'c48238b9-7a8a-413c-92af-a0fa4b10fe04', 'attached_at': '2024-12-12T03:01:17.000000', 'detached_at': '', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'serial': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2059.049675] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2265825-16dc-4887-a93d-da7ef5ebd0a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.067939] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d50150-f5c8-47de-8735-a01edc08f363 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.074642] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3782c76c-2560-445b-86cf-b738e608ed28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.092532] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee73775-1217-4e6c-add8-e080794556bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.107567] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] The volume has not been displaced from its original location: [datastore1] volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3/volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3.vmdk. No consolidation needed. 
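The _detach_volume_vmdk entry above prints the Cinder connection_info the driver works from; the pieces that drive the detach are driver_volume_type, the backing volume reference (data['volume']) and the volume_id. A minimal sketch of pulling those out of such a dict, using the values from the log (plain dict handling, not the volumeops code):

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-371204",
            "volume_id": "d04b1984-dcec-45fa-8a8d-eeff8eed3cb3",
            "name": "volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3",
            "access_mode": "rw",
        },
        "serial": "d04b1984-dcec-45fa-8a8d-eeff8eed3cb3",
    }

    def describe_detach(ci):
        data = ci["data"]
        return ci["driver_volume_type"], data["volume"], data["volume_id"]

    print(describe_detach(connection_info))
    # ('vmdk', 'vm-371204', 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3')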
{{(pid=62816) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2059.112965] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfiguring VM instance instance-00000069 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2059.113303] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b62dbe-0c3c-460d-906e-931bbf30444f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.132646] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2059.132646] env[62816]: value = "task-1789414" [ 2059.132646] env[62816]: _type = "Task" [ 2059.132646] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.142506] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789414, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.169803] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789412, 'name': Rename_Task, 'duration_secs': 0.174019} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.170025] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2059.170301] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1e11c57-5753-4bcc-8052-7e38b296c4a6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.176905] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2059.176905] env[62816]: value = "task-1789415" [ 2059.176905] env[62816]: _type = "Task" [ 2059.176905] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.185023] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789415, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.203581] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789413, 'name': ReconfigVM_Task, 'duration_secs': 0.193934} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.204033] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2059.643231] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789414, 'name': ReconfigVM_Task, 'duration_secs': 0.184655} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.643618] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Reconfigured VM instance instance-00000069 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2059.648462] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90d079dc-d207-4480-b5e0-2c232700604e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.664290] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2059.664290] env[62816]: value = "task-1789416" [ 2059.664290] env[62816]: _type = "Task" [ 2059.664290] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.674371] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789416, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.686061] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789415, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.711187] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2059.711545] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2059.711835] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2059.712161] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2059.712334] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2059.712626] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2059.712897] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2059.713120] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2059.713424] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2059.713656] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2059.713939] env[62816]: DEBUG nova.virt.hardware [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2059.720136] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2059.720485] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bc61dab-a8fe-4f27-8fbc-4a44561c847a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.740100] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2059.740100] env[62816]: value = "task-1789417" [ 2059.740100] env[62816]: _type = "Task" [ 2059.740100] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.748464] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789417, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.158086] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52971013-c4b7-b10f-ebc0-5d15b323bfcf/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2060.159012] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834884ec-e06a-4699-877a-00c731bb72bc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.165442] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52971013-c4b7-b10f-ebc0-5d15b323bfcf/disk-0.vmdk is in state: ready. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2060.165609] env[62816]: ERROR oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52971013-c4b7-b10f-ebc0-5d15b323bfcf/disk-0.vmdk due to incomplete transfer. [ 2060.168364] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a6c2358c-70c6-4832-8e35-20aa2bd314d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.174588] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789416, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.175535] env[62816]: DEBUG oslo_vmware.rw_handles [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52971013-c4b7-b10f-ebc0-5d15b323bfcf/disk-0.vmdk. {{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2060.175735] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Uploaded image 342dc3e0-1510-4bf8-b31a-a9336941a298 to the Glance image server {{(pid=62816) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2060.178085] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Destroying the VM {{(pid=62816) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2060.178345] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0e3e0c31-b882-4e53-807c-efe919817c18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.186329] env[62816]: DEBUG oslo_vmware.api [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789415, 'name': PowerOnVM_Task, 'duration_secs': 0.579901} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.187394] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2060.187603] env[62816]: INFO nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Took 7.74 seconds to spawn the instance on the hypervisor. [ 2060.187786] env[62816]: DEBUG nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2060.188099] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2060.188099] env[62816]: value = "task-1789418" [ 2060.188099] env[62816]: _type = "Task" [ 2060.188099] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.188782] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8161d08a-ef9a-44ff-9a4c-71966f4691f9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.201274] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789418, 'name': Destroy_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.249779] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789417, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.674861] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789416, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.699181] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789418, 'name': Destroy_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.708489] env[62816]: INFO nova.compute.manager [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Took 12.46 seconds to build instance. [ 2060.754020] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789417, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.175349] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789416, 'name': ReconfigVM_Task, 'duration_secs': 1.124884} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.175675] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-371204', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'name': 'volume-d04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': 'c48238b9-7a8a-413c-92af-a0fa4b10fe04', 'attached_at': '2024-12-12T03:01:17.000000', 'detached_at': '', 'volume_id': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3', 'serial': 'd04b1984-dcec-45fa-8a8d-eeff8eed3cb3'} {{(pid=62816) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2061.175973] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2061.176741] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be620bb-2cb9-4197-8984-98fa1c4c0da4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.183206] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2061.183404] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef65c4e5-8e90-443e-9c86-966d6f6c77f8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.201646] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789418, 'name': Destroy_Task} progress is 33%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.210423] env[62816]: DEBUG oslo_concurrency.lockutils [None req-eca260bb-159a-473c-ab4a-5640bef09728 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.973s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.251544] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789417, 'name': ReconfigVM_Task, 'duration_secs': 1.354813} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.251858] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2061.252705] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bcb959-b6ef-4d71-aa59-4b74b89073af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.274326] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f/a17c2b1f-47f2-4076-8e99-55e8189e952f.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2061.274573] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb9ade64-e51b-4d0f-b2e7-25eb0ca37c26 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.293464] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2061.293464] env[62816]: value = "task-1789420" [ 2061.293464] env[62816]: _type = "Task" [ 2061.293464] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.302794] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789420, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.308092] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2061.308322] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2061.308504] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] c48238b9-7a8a-413c-92af-a0fa4b10fe04 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2061.308765] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3e6b7a14-3122-4cfe-bd44-8cf3afed8f5e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.314788] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2061.314788] env[62816]: value = "task-1789421" [ 2061.314788] env[62816]: _type = "Task" [ 2061.314788] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.322618] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789421, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.521586] env[62816]: DEBUG oslo_concurrency.lockutils [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.521882] env[62816]: DEBUG oslo_concurrency.lockutils [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2061.522196] env[62816]: DEBUG nova.compute.manager [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2061.523140] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da22a651-1ede-4e3d-803e-a29c61a9c01b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.529988] env[62816]: DEBUG nova.compute.manager [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2061.530587] env[62816]: DEBUG nova.objects.instance [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'flavor' on Instance uuid 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2061.701990] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789418, 'name': Destroy_Task, 'duration_secs': 1.212958} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.702235] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Destroyed the VM [ 2061.702474] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deleting Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2061.702718] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-59e0b5eb-d2fb-4bfe-8de8-005f019652d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.708918] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2061.708918] env[62816]: value = "task-1789422" [ 2061.708918] env[62816]: _type = "Task" [ 2061.708918] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.716370] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789422, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.803804] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789420, 'name': ReconfigVM_Task, 'duration_secs': 0.259327} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.804100] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Reconfigured VM instance instance-0000006d to attach disk [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f/a17c2b1f-47f2-4076-8e99-55e8189e952f.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2061.804386] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2061.823189] env[62816]: DEBUG oslo_vmware.api [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789421, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091575} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.823466] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2061.823680] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2061.823877] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2061.824086] env[62816]: INFO nova.compute.manager [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Took 3.29 seconds to destroy the instance on the hypervisor. [ 2061.824330] env[62816]: DEBUG oslo.service.loopingcall [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2061.824551] env[62816]: DEBUG nova.compute.manager [-] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2061.824651] env[62816]: DEBUG nova.network.neutron [-] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2062.036431] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2062.036696] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c59f336-3340-48e2-a2db-5f4cb5345875 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.044108] env[62816]: DEBUG oslo_vmware.api [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2062.044108] env[62816]: value = "task-1789423" [ 2062.044108] env[62816]: _type = "Task" [ 2062.044108] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.054190] env[62816]: DEBUG oslo_vmware.api [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.218716] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789422, 'name': RemoveSnapshot_Task, 'duration_secs': 0.349812} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.219234] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deleted Snapshot of the VM instance {{(pid=62816) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2062.219311] env[62816]: DEBUG nova.compute.manager [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2062.220095] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba142b03-0a13-4c1e-ad5c-3c9855bc129e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.312228] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2d1d05-e655-457c-bdab-e30f308a86dd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.332947] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe30202f-949a-42ce-9c38-1fe33f80072b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.336420] env[62816]: DEBUG nova.compute.manager [req-b9233447-748f-4b34-b110-e324e26c1be3 req-e55b5980-9f70-43ca-ac51-92f7eca906d4 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Received event network-vif-deleted-dc37e042-ff36-48c9-81a1-a3669e102aae {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2062.336611] env[62816]: INFO nova.compute.manager [req-b9233447-748f-4b34-b110-e324e26c1be3 req-e55b5980-9f70-43ca-ac51-92f7eca906d4 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Neutron deleted interface dc37e042-ff36-48c9-81a1-a3669e102aae; detaching it from the instance and deleting it from the info cache [ 2062.336788] env[62816]: DEBUG nova.network.neutron [req-b9233447-748f-4b34-b110-e324e26c1be3 req-e55b5980-9f70-43ca-ac51-92f7eca906d4 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2062.354133] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c 
tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2062.554336] env[62816]: DEBUG oslo_vmware.api [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789423, 'name': PowerOffVM_Task, 'duration_secs': 0.229283} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.557609] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2062.557609] env[62816]: DEBUG nova.compute.manager [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2062.557609] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5725e7f-fe94-47ed-bf2b-95f7f5eefbe7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.733065] env[62816]: INFO nova.compute.manager [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Shelve offloading [ 2062.734099] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2062.734353] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18c3e2dc-deec-4f96-ab61-cdda2eb8cb4c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.740724] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2062.740724] env[62816]: value = "task-1789424" [ 2062.740724] env[62816]: _type = "Task" [ 2062.740724] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.748690] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789424, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.790320] env[62816]: DEBUG nova.network.neutron [-] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2062.839926] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d682352-1afb-4bb8-9e46-e85d8c74610a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.849930] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c71a00-e851-47e8-9e3d-46bd4017601c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.881555] env[62816]: DEBUG nova.compute.manager [req-b9233447-748f-4b34-b110-e324e26c1be3 req-e55b5980-9f70-43ca-ac51-92f7eca906d4 service nova] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Detach interface failed, port_id=dc37e042-ff36-48c9-81a1-a3669e102aae, reason: Instance c48238b9-7a8a-413c-92af-a0fa4b10fe04 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2062.897197] env[62816]: DEBUG nova.network.neutron [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Port aa1cc566-fcd4-44bc-a585-ddab737b1a55 binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2063.068620] env[62816]: DEBUG oslo_concurrency.lockutils [None req-314cb521-57a3-41c4-9587-0d6e322723d5 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.547s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.250813] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] VM already powered off {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2063.251089] env[62816]: DEBUG nova.compute.manager [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2063.251781] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074a0d30-eb01-4418-a11a-3b2bec009b15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.257260] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2063.257427] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2063.257600] env[62816]: DEBUG nova.network.neutron [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2063.292764] env[62816]: INFO nova.compute.manager [-] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Took 1.47 seconds to deallocate network for instance. [ 2063.834315] env[62816]: INFO nova.compute.manager [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Took 0.54 seconds to detach 1 volumes for instance. [ 2063.836816] env[62816]: DEBUG nova.compute.manager [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Deleting volume: d04b1984-dcec-45fa-8a8d-eeff8eed3cb3 {{(pid=62816) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2063.923086] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.923350] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.923595] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.924696] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2063.924944] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.925177] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.925390] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2063.925588] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.927920] env[62816]: INFO nova.compute.manager [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Terminating instance [ 2063.929858] env[62816]: DEBUG nova.compute.manager [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2063.930105] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2063.931048] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55441ba8-40fe-4d42-a53b-d097036e2ad6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.939698] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2063.939949] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fa5acb08-f2fa-4202-94a1-945c611d95b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.008610] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2064.008839] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2064.009087] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleting the datastore file [datastore1] 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2064.009374] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68614bb4-3d22-47ac-9107-97e2f7577e3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.014440] env[62816]: DEBUG nova.network.neutron [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.016811] env[62816]: DEBUG oslo_vmware.api [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2064.016811] env[62816]: value = "task-1789427" [ 2064.016811] env[62816]: _type = "Task" [ 2064.016811] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.024790] env[62816]: DEBUG oslo_vmware.api [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789427, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.376904] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.377314] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.377431] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.401212] env[62816]: INFO nova.scheduler.client.report [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted allocations for instance c48238b9-7a8a-413c-92af-a0fa4b10fe04 [ 2064.457261] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2064.457440] env[62816]: DEBUG nova.compute.manager [None 
req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2064.517978] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2064.528523] env[62816]: DEBUG oslo_vmware.api [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789427, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.257167} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.528669] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2064.528858] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2064.529698] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2064.529698] env[62816]: INFO nova.compute.manager [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Took 0.60 seconds to destroy the instance on the hypervisor. [ 2064.529698] env[62816]: DEBUG oslo.service.loopingcall [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2064.529971] env[62816]: DEBUG nova.compute.manager [-] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2064.529971] env[62816]: DEBUG nova.network.neutron [-] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2064.828604] env[62816]: DEBUG nova.compute.manager [req-b3e69a41-1850-4014-ac5e-70fa81a13e5b req-b32d5837-44dc-49f7-a769-f1190a8acd09 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-vif-unplugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2064.828604] env[62816]: DEBUG oslo_concurrency.lockutils [req-b3e69a41-1850-4014-ac5e-70fa81a13e5b req-b32d5837-44dc-49f7-a769-f1190a8acd09 service nova] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2064.828604] env[62816]: DEBUG oslo_concurrency.lockutils [req-b3e69a41-1850-4014-ac5e-70fa81a13e5b req-b32d5837-44dc-49f7-a769-f1190a8acd09 service nova] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2064.828604] env[62816]: DEBUG oslo_concurrency.lockutils [req-b3e69a41-1850-4014-ac5e-70fa81a13e5b req-b32d5837-44dc-49f7-a769-f1190a8acd09 service nova] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.828604] env[62816]: DEBUG nova.compute.manager [req-b3e69a41-1850-4014-ac5e-70fa81a13e5b req-b32d5837-44dc-49f7-a769-f1190a8acd09 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] No waiting events found dispatching network-vif-unplugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2064.829213] env[62816]: WARNING nova.compute.manager [req-b3e69a41-1850-4014-ac5e-70fa81a13e5b req-b32d5837-44dc-49f7-a769-f1190a8acd09 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received unexpected event network-vif-unplugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2064.862913] env[62816]: DEBUG nova.compute.manager [req-8c05892e-63df-4598-912e-d5daa7af3912 req-746bfd80-81b2-4658-9547-88662de3c438 service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Received event network-vif-deleted-77c83104-69e7-4415-b48f-1e9d34cca8e9 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2064.862913] env[62816]: INFO nova.compute.manager [req-8c05892e-63df-4598-912e-d5daa7af3912 req-746bfd80-81b2-4658-9547-88662de3c438 service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Neutron deleted interface 77c83104-69e7-4415-b48f-1e9d34cca8e9; detaching it from the instance and deleting it from the info cache [ 2064.862913] env[62816]: DEBUG nova.network.neutron [req-8c05892e-63df-4598-912e-d5daa7af3912 req-746bfd80-81b2-4658-9547-88662de3c438 service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.888339] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2064.889768] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c98e173-e6ad-442c-876d-ce8df5e6a39b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.897025] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2064.897254] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7d21e71-1d9d-4f36-ac7e-4d5ca439292e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.907683] env[62816]: DEBUG oslo_concurrency.lockutils [None req-97bf4267-55c3-42e9-96c7-daac099ddbc6 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "c48238b9-7a8a-413c-92af-a0fa4b10fe04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.383s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2064.958043] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.958268] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.958415] env[62816]: DEBUG 
nova.network.neutron [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2065.004374] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2065.004592] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2065.004779] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleting the datastore file [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.005054] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a90203c-c1e4-4b9a-9e8e-769f7e8fea05 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.012131] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2065.012131] env[62816]: value = "task-1789429" [ 2065.012131] env[62816]: _type = "Task" [ 2065.012131] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.019556] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789429, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.232327] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.232603] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.232817] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2065.233015] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2065.233213] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2065.236011] env[62816]: INFO nova.compute.manager [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Terminating instance [ 2065.237731] env[62816]: DEBUG nova.compute.manager [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2065.237927] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2065.238748] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b48f1871-dec8-4d19-a3e0-a28c88762da1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.246075] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2065.246320] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cff3934a-9832-4f9c-90c0-b8f472266a55 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.252829] env[62816]: DEBUG oslo_vmware.api [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2065.252829] env[62816]: value = "task-1789430" [ 2065.252829] env[62816]: _type = "Task" [ 2065.252829] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.260630] env[62816]: DEBUG oslo_vmware.api [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789430, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.341442] env[62816]: DEBUG nova.network.neutron [-] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.364856] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed9a6597-b955-4361-b1f0-3f54509a7276 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.373680] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9792205-12db-4004-84ce-be656368b91f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.399918] env[62816]: DEBUG nova.compute.manager [req-8c05892e-63df-4598-912e-d5daa7af3912 req-746bfd80-81b2-4658-9547-88662de3c438 service nova] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Detach interface failed, port_id=77c83104-69e7-4415-b48f-1e9d34cca8e9, reason: Instance 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2065.465887] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Skipping network cache update for instance because it is being deleted. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9978}} [ 2065.522405] env[62816]: DEBUG oslo_vmware.api [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789429, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161481} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.522665] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2065.522850] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2065.523128] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2065.547111] env[62816]: INFO nova.scheduler.client.report [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted allocations for instance 5e6be756-2dba-4977-aad2-61c5e97dc761 [ 2065.676163] env[62816]: DEBUG nova.network.neutron [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [{"id": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "address": "fa:16:3e:ce:a1:cc", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1cc566-fc", "ovs_interfaceid": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.763315] env[62816]: DEBUG oslo_vmware.api [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789430, 'name': PowerOffVM_Task, 'duration_secs': 0.202281} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.763580] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2065.763754] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2065.764018] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-320c2af5-1782-4336-8aae-2ea8af978064 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.840172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2065.840410] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2065.840596] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] 5e76d63c-b05c-4e8b-8b90-6110bd7d654c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.840864] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3e5043a-3020-4c00-9c06-34dc7683df35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.845406] env[62816]: INFO nova.compute.manager [-] [instance: 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430] Took 1.32 seconds to deallocate network for instance. [ 2065.848971] env[62816]: DEBUG oslo_vmware.api [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2065.848971] env[62816]: value = "task-1789432" [ 2065.848971] env[62816]: _type = "Task" [ 2065.848971] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.860985] env[62816]: DEBUG oslo_vmware.api [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789432, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.053405] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.053647] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2066.053903] env[62816]: DEBUG nova.objects.instance [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'resources' on Instance uuid 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2066.179943] env[62816]: DEBUG oslo_concurrency.lockutils [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.357340] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2066.363347] env[62816]: DEBUG oslo_vmware.api [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144494} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.363619] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2066.363810] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2066.363993] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2066.364324] env[62816]: INFO nova.compute.manager [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2066.364501] env[62816]: DEBUG oslo.service.loopingcall [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2066.364697] env[62816]: DEBUG nova.compute.manager [-] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2066.364793] env[62816]: DEBUG nova.network.neutron [-] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2066.471034] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2066.471403] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.471629] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.471772] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.471905] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2066.556294] env[62816]: DEBUG nova.objects.instance [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'numa_topology' on Instance uuid 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2066.701312] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ba786f-1a42-4d55-8313-7d217ca1f732 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.721472] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505ae6c3-190e-4995-82e6-becc60427409 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.728336] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2066.856648] env[62816]: DEBUG nova.compute.manager [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2066.856849] env[62816]: DEBUG nova.compute.manager [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing instance network info cache due to event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2066.857255] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.857441] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.857615] env[62816]: DEBUG nova.network.neutron [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2066.981762] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] There are 29 instances to clean {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2066.983646] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 152b3bdd-82d6-4c8a-9fbd-b220d212cfb4] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2067.058340] env[62816]: DEBUG nova.objects.base [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Object Instance<5e6be756-2dba-4977-aad2-61c5e97dc761> lazy-loaded attributes: resources,numa_topology {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2067.103166] env[62816]: DEBUG nova.network.neutron [-] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.131459] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-631356f1-bac6-4f63-8e9c-84078dbba8c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.138849] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a93bca0-3dbb-4091-9fad-bfa07a66ccce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.171046] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99522363-b270-4144-aeed-c2f876071479 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.178224] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8bd2d84-bcbd-4667-b7f4-bc3b00be6d11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.190910] env[62816]: DEBUG nova.compute.provider_tree [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not 
changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2067.234083] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2067.234310] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-407bee6e-1174-4f66-a708-a83871c2f1a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.240883] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2067.240883] env[62816]: value = "task-1789433" [ 2067.240883] env[62816]: _type = "Task" [ 2067.240883] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2067.248071] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789433, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2067.409399] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2067.485125] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c48238b9-7a8a-413c-92af-a0fa4b10fe04] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2067.605620] env[62816]: INFO nova.compute.manager [-] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Took 1.24 seconds to deallocate network for instance. [ 2067.633967] env[62816]: DEBUG nova.network.neutron [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updated VIF entry in instance network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2067.634418] env[62816]: DEBUG nova.network.neutron [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": null, "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.694310] env[62816]: DEBUG nova.scheduler.client.report [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2067.750905] env[62816]: DEBUG oslo_vmware.api [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789433, 'name': PowerOnVM_Task, 'duration_secs': 0.396383} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.751167] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2067.751416] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-c32845fd-2741-45b7-b2d8-1222f742e42c tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance 'a17c2b1f-47f2-4076-8e99-55e8189e952f' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2067.988395] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 40bddbd1-9fa6-4dfb-9131-6c376f9417de] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2068.114823] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.137536] env[62816]: DEBUG oslo_concurrency.lockutils [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2068.137811] env[62816]: DEBUG nova.compute.manager [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Received event network-vif-deleted-738d894d-6a65-4c5d-891f-2c14246c82f4 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2068.137994] env[62816]: INFO nova.compute.manager [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Neutron deleted interface 738d894d-6a65-4c5d-891f-2c14246c82f4; detaching it from the instance and deleting it from the info cache [ 2068.138186] env[62816]: DEBUG nova.network.neutron [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.199228] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.145s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.201475] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 
tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.844s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.201721] env[62816]: DEBUG nova.objects.instance [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'resources' on Instance uuid 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2068.492265] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 35852805-5776-4b65-96aa-4365b32c66d5] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2068.640479] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef6abd05-d10d-418c-9681-b30b4489e4de {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.650209] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c3d555-cb52-421a-b247-6f7d0201c352 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.674172] env[62816]: DEBUG nova.compute.manager [req-bb64090c-713a-4049-8b30-d648f90c17d3 req-e0dbf5d8-1219-4227-894b-0eca2d6ee452 service nova] [instance: 5e76d63c-b05c-4e8b-8b90-6110bd7d654c] Detach interface failed, port_id=738d894d-6a65-4c5d-891f-2c14246c82f4, reason: Instance 5e76d63c-b05c-4e8b-8b90-6110bd7d654c could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2068.709579] env[62816]: DEBUG oslo_concurrency.lockutils [None req-366bb8a4-47bd-40b6-b2c7-f7e612efdae0 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.336s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2068.710463] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.301s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.710645] env[62816]: INFO nova.compute.manager [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Unshelving [ 2068.785759] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62de2570-a096-4f62-bdb2-c0edde161f9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.793339] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba96fd13-c149-4d90-8e94-b7f1385d4d14 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.823590] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a7146d-b100-424d-ae4c-d7805d06a118 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.831107] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b16d0b-72b2-4eaf-9398-f027797114ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.845805] env[62816]: DEBUG nova.compute.provider_tree [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.996096] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: fc6ed02d-7bf2-4ef6-bb1b-05832bc720b4] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2069.348774] env[62816]: DEBUG nova.scheduler.client.report [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2069.499323] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: cb0a9fc4-6809-4ce9-9521-eb1a115493cf] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2069.733368] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.765224] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2069.765473] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.765665] env[62816]: DEBUG nova.compute.manager [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Going to confirm migration 6 {{(pid=62816) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2069.853799] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.652s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.856696] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.741s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.856696] env[62816]: DEBUG nova.objects.instance [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'resources' on Instance uuid 5e76d63c-b05c-4e8b-8b90-6110bd7d654c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2069.872939] env[62816]: INFO nova.scheduler.client.report [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 
tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocations for instance 1c9dbe62-e1a0-429d-a22d-4ce8c51a8430 [ 2070.002465] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 5b87e09d-ae08-4936-8479-c845e25b31b4] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2070.335634] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2070.335835] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2070.336069] env[62816]: DEBUG nova.network.neutron [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2070.336274] env[62816]: DEBUG nova.objects.instance [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'info_cache' on Instance uuid a17c2b1f-47f2-4076-8e99-55e8189e952f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2070.379831] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e7adad75-a8bc-4485-a07a-95905ac968a7 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "1c9dbe62-e1a0-429d-a22d-4ce8c51a8430" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.455s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.424873] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7530c167-14ef-4135-be27-7f6917baabec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.432552] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bdf220-3c1f-435d-b986-8c12bc5e4cf4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.463492] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01208294-dd13-42a1-84b7-bd67637308d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.471109] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0e0ccb-2507-46b0-9259-4df5e5b06f46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.484678] env[62816]: DEBUG 
nova.compute.provider_tree [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2070.505320] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 341bf195-e528-4e3b-8636-fac7a383d228] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2070.851852] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2070.852106] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2070.987715] env[62816]: DEBUG nova.scheduler.client.report [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2071.008748] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 251b3ce3-06a4-40d4-ba18-a217650c9152] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2071.354471] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2071.492425] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.636s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.494456] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.761s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.494679] env[62816]: DEBUG nova.objects.instance [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'pci_requests' on Instance uuid 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2071.511283] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 6f0c72ab-1eaf-4db5-842f-b0ba75739e66] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2071.513697] env[62816]: INFO nova.scheduler.client.report [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted allocations for instance 5e76d63c-b05c-4e8b-8b90-6110bd7d654c [ 2071.582421] env[62816]: DEBUG nova.network.neutron [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [{"id": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "address": "fa:16:3e:ce:a1:cc", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaa1cc566-fc", "ovs_interfaceid": "aa1cc566-fcd4-44bc-a585-ddab737b1a55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.872475] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.998987] env[62816]: DEBUG nova.objects.instance [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'numa_topology' on Instance uuid 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2072.015214] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 37cb03ea-2e94-4466-89c0-2e3f7fdac076] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2072.022626] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8999014c-9966-499f-939b-10edc8826d44 tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "5e76d63c-b05c-4e8b-8b90-6110bd7d654c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.790s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.084989] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-a17c2b1f-47f2-4076-8e99-55e8189e952f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.085283] env[62816]: DEBUG nova.objects.instance [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'migration_context' on Instance uuid a17c2b1f-47f2-4076-8e99-55e8189e952f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2072.501258] env[62816]: INFO nova.compute.claims [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2072.518576] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: aa4a7eae-7891-4516-97d9-ba5ec5dd4c4d] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2072.587845] env[62816]: DEBUG nova.objects.base [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2072.588763] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9deb8a47-29bf-4a45-8a27-91bb1c600aac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.607842] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-87e8ffcc-8b66-4d31-a785-e933473af554 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.613326] env[62816]: DEBUG oslo_vmware.api [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2072.613326] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52fada13-f921-2beb-4b9e-61b70c492910" [ 2072.613326] env[62816]: _type = "Task" [ 2072.613326] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.620783] env[62816]: DEBUG oslo_vmware.api [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fada13-f921-2beb-4b9e-61b70c492910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.020867] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 642a07d7-8d15-4874-9dbe-bb9aa29e4d8a] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2073.123499] env[62816]: DEBUG oslo_vmware.api [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52fada13-f921-2beb-4b9e-61b70c492910, 'name': SearchDatastore_Task, 'duration_secs': 0.008928} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.123499] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.523537] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: e26b6593-7e64-4a43-b09d-92d2e668c25b] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2073.576968] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bdbd63-9ca5-4f3e-802c-381e396fdb74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.584739] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfba4d4f-6bf8-4a5b-9ffd-5940c8d88fee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.615291] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b50464e-7980-4501-96ee-b432d19a24b7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.622249] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00013948-5b06-4a7d-b16b-0232b3e42dbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.635219] env[62816]: DEBUG nova.compute.provider_tree [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2074.027061] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c9ebcce1-8374-46fb-996f-c271cb8dbf84] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2074.138061] env[62816]: DEBUG nova.scheduler.client.report [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2074.531068] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: bd5482f1-8884-49fa-9e9c-7873eadeefe0] Instance has had 0 of 5 cleanup attempts {{(pid=62816) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2074.642992] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.148s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.645614] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.773s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.647151] env[62816]: INFO nova.compute.claims [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2074.676354] env[62816]: INFO nova.network.neutron [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating port eadfcc8c-606b-4352-8ce4-4ad681cc07c6 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2075.034104] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 913bba01-e64b-4b52-af94-5effcefc2677] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2075.538603] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: fa719ff5-0219-485f-aac7-2cde4bbef8f6] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2075.722893] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7eb039-674c-4416-9a9c-c6165282c838 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.730626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a03acf8-030d-4010-b053-46dd12c205ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.759279] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c51f17e-f997-4e76-95d3-cbf638910637 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.766602] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57ee54d-6fa0-4c31-a5dc-0e2cfb6a6262 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.779557] env[62816]: DEBUG nova.compute.provider_tree [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 
27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.917454] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "4ab07a21-2685-42bc-af13-b95473993d6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.917722] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "4ab07a21-2685-42bc-af13-b95473993d6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.917929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "4ab07a21-2685-42bc-af13-b95473993d6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2075.918129] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "4ab07a21-2685-42bc-af13-b95473993d6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2075.918304] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "4ab07a21-2685-42bc-af13-b95473993d6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.920382] env[62816]: INFO nova.compute.manager [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Terminating instance [ 2075.922061] env[62816]: DEBUG nova.compute.manager [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2075.922222] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2075.923037] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bac93d1-278f-4cb8-bf7e-53427fbc99cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.930439] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2075.930661] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e69cea54-615d-4485-9789-6ed67ba369eb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.936449] env[62816]: DEBUG oslo_vmware.api [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2075.936449] env[62816]: value = "task-1789435" [ 2075.936449] env[62816]: _type = "Task" [ 2075.936449] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.944010] env[62816]: DEBUG oslo_vmware.api [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789435, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.040471] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: a50b78c5-bb7e-4038-9a74-ecde2042828f] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2076.112801] env[62816]: DEBUG nova.compute.manager [req-8deb9480-9f2d-4a0f-b3d7-e06396fcbf71 req-75ac0165-4fac-4611-b912-2109eed96d7f service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-vif-plugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2076.112997] env[62816]: DEBUG oslo_concurrency.lockutils [req-8deb9480-9f2d-4a0f-b3d7-e06396fcbf71 req-75ac0165-4fac-4611-b912-2109eed96d7f service nova] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.113238] env[62816]: DEBUG oslo_concurrency.lockutils [req-8deb9480-9f2d-4a0f-b3d7-e06396fcbf71 req-75ac0165-4fac-4611-b912-2109eed96d7f service nova] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.113461] env[62816]: DEBUG oslo_concurrency.lockutils [req-8deb9480-9f2d-4a0f-b3d7-e06396fcbf71 req-75ac0165-4fac-4611-b912-2109eed96d7f service nova] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.113640] env[62816]: DEBUG nova.compute.manager [req-8deb9480-9f2d-4a0f-b3d7-e06396fcbf71 req-75ac0165-4fac-4611-b912-2109eed96d7f service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] No waiting events found dispatching network-vif-plugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2076.113837] env[62816]: WARNING nova.compute.manager [req-8deb9480-9f2d-4a0f-b3d7-e06396fcbf71 req-75ac0165-4fac-4611-b912-2109eed96d7f service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received unexpected event network-vif-plugged-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 2076.194441] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.194578] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.194769] env[62816]: DEBUG nova.network.neutron [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2076.283177] env[62816]: DEBUG nova.scheduler.client.report [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2076.445567] env[62816]: DEBUG oslo_vmware.api [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789435, 'name': PowerOffVM_Task, 'duration_secs': 0.188126} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.445791] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2076.445993] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2076.446249] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-420aa63c-aa4e-4bdf-97a9-06fda906f5e8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.543529] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: f97ea34e-792e-4023-bd2f-549dba129925] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2076.577851] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2076.578027] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2076.578243] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleting the datastore file [datastore1] 4ab07a21-2685-42bc-af13-b95473993d6f {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2076.578533] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-475651bc-956d-4d58-85c3-7cd0d6ef6f95 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.585023] env[62816]: DEBUG oslo_vmware.api [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for the task: (returnval){ [ 2076.585023] env[62816]: value = "task-1789437" [ 2076.585023] env[62816]: _type = "Task" [ 2076.585023] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.593022] env[62816]: DEBUG oslo_vmware.api [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789437, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.787780] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.142s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.788339] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2076.792855] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 3.669s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.914399] env[62816]: DEBUG nova.network.neutron [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.046472] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9972b167-a950-4dba-ac02-068f66300053] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2077.094930] env[62816]: DEBUG oslo_vmware.api [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Task: {'id': task-1789437, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.137375} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.095211] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.095402] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2077.095585] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2077.095760] env[62816]: INFO nova.compute.manager [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Took 1.17 seconds to destroy the instance on the hypervisor. [ 2077.096048] env[62816]: DEBUG oslo.service.loopingcall [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.096253] env[62816]: DEBUG nova.compute.manager [-] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2077.096350] env[62816]: DEBUG nova.network.neutron [-] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2077.296731] env[62816]: DEBUG nova.compute.utils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2077.302974] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2077.303277] env[62816]: DEBUG nova.network.neutron [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2077.348615] env[62816]: DEBUG nova.policy [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c5023a18d243b7aafb6d6181f931d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e070d1729247ff83b4ff6997b45385', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2077.377058] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183fabb6-d9c7-4a92-9c09-3ea570d535b6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.384767] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad1f0b80-73d4-4877-ab84-a8975401b836 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.415927] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a8efdf-9272-476e-bb91-e2c208ad5ddf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.418795] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.426478] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c154abc-2197-4417-b852-36f180f85079 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.440880] env[62816]: DEBUG nova.compute.provider_tree [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2077.453750] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='3a75175e1bd92008dd2c1f56c1436bb0',container_format='bare',created_at=2024-12-12T03:01:42Z,direct_url=,disk_format='vmdk',id=342dc3e0-1510-4bf8-b31a-a9336941a298,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1871885252-shelved',owner='8c54ea5a5abf4f0298b76f6081de8e60',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2024-12-12T03:01:56Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2077.453989] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2077.454165] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2077.454404] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2077.454586] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2077.454742] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2077.454955] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2077.455134] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2077.455304] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Got 1 possible topologies {{(pid=62816) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2077.455467] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2077.455640] env[62816]: DEBUG nova.virt.hardware [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2077.456703] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e717ea6-492b-4db7-be5b-17293dd6d966 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.464271] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a80ad7-6687-49f3-93df-0ac09c43627d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.478131] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:e2:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '53955f0e-c162-4cef-8bd5-335b369c36b6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eadfcc8c-606b-4352-8ce4-4ad681cc07c6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2077.485587] env[62816]: DEBUG oslo.service.loopingcall [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.485871] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2077.486115] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff478716-a3ed-4f8f-909d-f1700c66cbf6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.504691] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2077.504691] env[62816]: value = "task-1789438" [ 2077.504691] env[62816]: _type = "Task" [ 2077.504691] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.512215] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789438, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.550230] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: b9e8af08-9579-4dbf-8ea1-35ffab75e159] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2077.651109] env[62816]: DEBUG nova.network.neutron [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Successfully created port: 1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2077.802824] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2077.805388] env[62816]: DEBUG nova.network.neutron [-] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.962047] env[62816]: ERROR nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [req-8d279774-d69d-4889-b1a4-8b05329604ef] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8d279774-d69d-4889-b1a4-8b05329604ef"}]} [ 2077.978189] env[62816]: DEBUG nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2077.991890] env[62816]: DEBUG nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2077.992140] env[62816]: DEBUG nova.compute.provider_tree [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2078.002995] env[62816]: DEBUG nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2078.013547] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789438, 'name': CreateVM_Task, 'duration_secs': 0.331004} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.013709] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2078.014388] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.014562] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.014922] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2078.015186] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-085ac36a-44e7-4be7-af63-056100222176 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.020083] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2078.020083] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52e08389-d27f-f59d-b67e-901d36e2cc35" [ 2078.020083] env[62816]: _type = "Task" [ 2078.020083] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.020910] env[62816]: DEBUG nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2078.032898] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52e08389-d27f-f59d-b67e-901d36e2cc35, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.053878] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: d03ed540-5c20-4bcb-ac7e-eec8c09e4451] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2078.089476] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b573ba7e-9f80-48a9-aecd-922546877ab1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.097119] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bd812ba-ca29-44c5-89ba-786ad1b797f5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.127908] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86820e3b-c5da-42e6-9f8f-e1fee5644c49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.135551] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1113382d-74df-4217-9b97-7cf8680c6ad2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.141595] env[62816]: DEBUG nova.compute.manager [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2078.141792] env[62816]: DEBUG nova.compute.manager [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing instance network info cache due to event network-changed-eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2078.142045] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] Acquiring lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.142200] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] Acquired lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.142363] env[62816]: DEBUG nova.network.neutron [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Refreshing network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2078.153366] env[62816]: DEBUG nova.compute.provider_tree [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2078.312435] env[62816]: INFO nova.compute.manager [-] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Took 1.22 seconds to deallocate network for instance. 
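The 409 placement.concurrent_update response and the inventory/aggregate/trait refresh recorded above are Placement's optimistic-concurrency scheme at work: every inventory write carries the resource provider generation, a stale generation is rejected with HTTP 409, and the client re-reads the provider before retrying. Below is a minimal sketch of that read-retry loop, assuming a plain requests session and a placeholder Placement endpoint; it illustrates the pattern only and is not Nova's scheduler report client.

    # Sketch of Placement's generation-based optimistic concurrency.
    # Endpoint, credentials and microversion handling are placeholders;
    # only the retry pattern mirrors what the log records show.
    import requests

    PLACEMENT = "http://placement.example:8778"   # hypothetical endpoint
    HEADERS = {"OpenStack-API-Version": "placement 1.39"}

    def set_inventory_with_retry(session, rp_uuid, inventories, max_retries=3):
        """PUT inventory for a resource provider, retrying on 409 conflicts."""
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}"
        for _ in range(max_retries):
            # Re-read the provider to pick up its current generation.
            rp = session.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": rp["generation"],
                "inventories": inventories,
            }
            resp = session.put(f"{url}/inventories", json=payload, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()          # body carries the bumped generation
            if resp.status_code != 409:
                resp.raise_for_status()
            # 409 placement.concurrent_update: another writer won the race,
            # so loop and re-read the generation before trying again.
        raise RuntimeError(f"inventory update still conflicting after {max_retries} attempts")

In the records above this is exactly the sequence for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa: the first set_inventory_for_provider call loses the race, the client refreshes inventories, aggregates and traits, and the retried update further down succeeds, bumping the provider generation from 157 to 158.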
[ 2078.534576] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.534576] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Processing image 342dc3e0-1510-4bf8-b31a-a9336941a298 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2078.534800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.534973] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.535186] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2078.535430] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17c3466b-a4ae-4101-9eb1-d9e016134a6a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.544042] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2078.544247] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2078.544933] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c330059a-1efd-434b-babc-45babeeb4681 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.549815] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2078.549815] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52473e0e-6fb0-fc04-15ac-a33c96af813e" [ 2078.549815] env[62816]: _type = "Task" [ 2078.549815] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.557211] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52473e0e-6fb0-fc04-15ac-a33c96af813e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.558506] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 543d69d2-0694-4d57-bbae-f8851ff0f0dc] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2078.682552] env[62816]: DEBUG nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 157 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2078.682862] env[62816]: DEBUG nova.compute.provider_tree [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 157 to 158 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2078.683061] env[62816]: DEBUG nova.compute.provider_tree [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2078.814175] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2078.818356] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.843824] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2078.844120] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2078.844286] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2078.844469] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2078.844619] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2078.844767] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2078.844977] env[62816]: DEBUG 
nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2078.845157] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2078.845334] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2078.845499] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2078.845676] env[62816]: DEBUG nova.virt.hardware [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2078.846846] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ae3e45-017c-442a-b5a7-9b03c28fbe0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.855586] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6b7a123-6cff-4d96-baa7-bd9767bf1a11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.883215] env[62816]: DEBUG nova.network.neutron [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updated VIF entry in instance network info cache for port eadfcc8c-606b-4352-8ce4-4ad681cc07c6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2078.883513] env[62816]: DEBUG nova.network.neutron [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [{"id": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "address": "fa:16:3e:ab:e2:a6", "network": {"id": "8317c3c2-8054-44e4-a037-e17801dd59c7", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-1289537171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8c54ea5a5abf4f0298b76f6081de8e60", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "53955f0e-c162-4cef-8bd5-335b369c36b6", "external-id": "nsx-vlan-transportzone-623", "segmentation_id": 623, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeadfcc8c-60", "ovs_interfaceid": "eadfcc8c-606b-4352-8ce4-4ad681cc07c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.060449] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Preparing fetch location {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2079.060712] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Fetch image to [datastore1] OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145/OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145.vmdk {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2079.060901] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Downloading stream optimized image 342dc3e0-1510-4bf8-b31a-a9336941a298 to [datastore1] OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145/OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145.vmdk on the data store datastore1 as vApp {{(pid=62816) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2079.061091] env[62816]: DEBUG nova.virt.vmwareapi.images [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Downloading image file data 342dc3e0-1510-4bf8-b31a-a9336941a298 to the ESX as VM named 'OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145' {{(pid=62816) fetch_image_stream_optimized 
/opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2079.064721] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: c66fa160-d4dd-429f-8751-f36cb2020ff1] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2079.125971] env[62816]: DEBUG nova.network.neutron [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Successfully updated port: 1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2079.128573] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2079.128573] env[62816]: value = "resgroup-9" [ 2079.128573] env[62816]: _type = "ResourcePool" [ 2079.128573] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2079.128829] env[62816]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-bc448c76-6f4c-45d0-85c8-3993e97bbbf7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.149544] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease: (returnval){ [ 2079.149544] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52565eb2-6cc2-cd34-f18c-19e719e42066" [ 2079.149544] env[62816]: _type = "HttpNfcLease" [ 2079.149544] env[62816]: } obtained for vApp import into resource pool (val){ [ 2079.149544] env[62816]: value = "resgroup-9" [ 2079.149544] env[62816]: _type = "ResourcePool" [ 2079.149544] env[62816]: }. {{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2079.149840] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the lease: (returnval){ [ 2079.149840] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52565eb2-6cc2-cd34-f18c-19e719e42066" [ 2079.149840] env[62816]: _type = "HttpNfcLease" [ 2079.149840] env[62816]: } to be ready. {{(pid=62816) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2079.156080] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2079.156080] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52565eb2-6cc2-cd34-f18c-19e719e42066" [ 2079.156080] env[62816]: _type = "HttpNfcLease" [ 2079.156080] env[62816]: } is initializing. 
{{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2079.386643] env[62816]: DEBUG oslo_concurrency.lockutils [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] Releasing lock "refresh_cache-5e6be756-2dba-4977-aad2-61c5e97dc761" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.386924] env[62816]: DEBUG nova.compute.manager [req-d9344b3e-7ec5-4ac4-bbfe-2d8e2cedbceb req-9e9f0919-92fd-450c-8d76-26c7af4dc4bd service nova] [instance: 4ab07a21-2685-42bc-af13-b95473993d6f] Received event network-vif-deleted-c1c7e341-ffdc-440b-8b2a-6dff7559b1bd {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2079.567624] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: dd833e38-691c-4757-9c6b-659c74343d3e] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2079.628394] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2079.628551] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.628648] env[62816]: DEBUG nova.network.neutron [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2079.658021] env[62816]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2079.658021] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52565eb2-6cc2-cd34-f18c-19e719e42066" [ 2079.658021] env[62816]: _type = "HttpNfcLease" [ 2079.658021] env[62816]: } is ready. {{(pid=62816) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2079.658264] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2079.658264] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52565eb2-6cc2-cd34-f18c-19e719e42066" [ 2079.658264] env[62816]: _type = "HttpNfcLease" [ 2079.658264] env[62816]: }. 
{{(pid=62816) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2079.659636] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94be8e10-1a45-471a-9f15-6f3fb4612285 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.666936] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52694d6c-9ce2-15c1-fba5-8e5a8627132c/disk-0.vmdk from lease info. {{(pid=62816) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2079.667652] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52694d6c-9ce2-15c1-fba5-8e5a8627132c/disk-0.vmdk. {{(pid=62816) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2079.726345] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.933s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.730380] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.912s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.730669] env[62816]: DEBUG nova.objects.instance [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lazy-loading 'resources' on Instance uuid 4ab07a21-2685-42bc-af13-b95473993d6f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2079.737577] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b6239e32-3207-4c9e-bdd9-7012a721d201 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.072030] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 9745413b-2bd8-45d7-8491-483e4921b59c] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2080.217518] env[62816]: DEBUG nova.compute.manager [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Received event network-vif-plugged-1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2080.217748] env[62816]: DEBUG oslo_concurrency.lockutils 
[req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] Acquiring lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.217962] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.218373] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.218579] env[62816]: DEBUG nova.compute.manager [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] No waiting events found dispatching network-vif-plugged-1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2080.218775] env[62816]: WARNING nova.compute.manager [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Received unexpected event network-vif-plugged-1408cc61-c46f-4333-bd79-7dec976cea2f for instance with vm_state building and task_state spawning. [ 2080.218954] env[62816]: DEBUG nova.compute.manager [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Received event network-changed-1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2080.219163] env[62816]: DEBUG nova.compute.manager [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Refreshing instance network info cache due to event network-changed-1408cc61-c46f-4333-bd79-7dec976cea2f. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2080.219371] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] Acquiring lock "refresh_cache-87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2080.220254] env[62816]: DEBUG nova.network.neutron [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2080.289720] env[62816]: INFO nova.scheduler.client.report [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocation for migration 94c84fb6-40d1-4e65-8581-b5174a4776a2 [ 2080.322056] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87cf70aa-c91e-4555-9e5c-9c6091da99ae {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.330265] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196c313e-0155-4bce-87d1-0ee3b3fa34fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.371071] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c00075-5218-4f79-becd-329c937573bd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.377525] env[62816]: DEBUG nova.network.neutron [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Updating instance_info_cache with network_info: [{"id": "1408cc61-c46f-4333-bd79-7dec976cea2f", "address": "fa:16:3e:71:13:cf", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1408cc61-c4", "ovs_interfaceid": "1408cc61-c46f-4333-bd79-7dec976cea2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.385378] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b1718e-b597-4c17-b70c-dc26edb222d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.401999] env[62816]: DEBUG nova.compute.provider_tree [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2080.578094] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 
8105e650-8482-40c6-bd7a-b8daea19a0d5] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2080.800866] env[62816]: DEBUG oslo_concurrency.lockutils [None req-b72976dc-e986-426c-bb1d-0021a43375bf tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.035s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.880449] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2080.880838] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Instance network_info: |[{"id": "1408cc61-c46f-4333-bd79-7dec976cea2f", "address": "fa:16:3e:71:13:cf", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1408cc61-c4", "ovs_interfaceid": "1408cc61-c46f-4333-bd79-7dec976cea2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2080.881199] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] Acquired lock "refresh_cache-87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2080.881385] env[62816]: DEBUG nova.network.neutron [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Refreshing network info cache for port 1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2080.882873] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:71:13:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1408cc61-c46f-4333-bd79-7dec976cea2f', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2080.891179] env[62816]: DEBUG oslo.service.loopingcall [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.896396] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2080.897469] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-185dc53c-dfe6-4318-b8d6-efb2909205be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.913188] env[62816]: DEBUG nova.scheduler.client.report [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2080.922677] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2080.922677] env[62816]: value = "task-1789440" [ 2080.922677] env[62816]: _type = "Task" [ 2080.922677] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.933919] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789440, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.001341] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Completed reading data from the image iterator. {{(pid=62816) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2081.001499] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52694d6c-9ce2-15c1-fba5-8e5a8627132c/disk-0.vmdk. 
{{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2081.002459] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca7a765-4277-4634-b21f-ea92d2f02a5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.010800] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52694d6c-9ce2-15c1-fba5-8e5a8627132c/disk-0.vmdk is in state: ready. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2081.011303] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52694d6c-9ce2-15c1-fba5-8e5a8627132c/disk-0.vmdk. {{(pid=62816) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2081.011303] env[62816]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-6275b1e5-c0f1-43a8-a28e-bb84f218293b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.081698] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 31ac8296-14fa-46f7-b825-c31904b832d5] Instance has had 0 of 5 cleanup attempts {{(pid=62816) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2081.187998] env[62816]: DEBUG nova.network.neutron [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Updated VIF entry in instance network info cache for port 1408cc61-c46f-4333-bd79-7dec976cea2f. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2081.188501] env[62816]: DEBUG nova.network.neutron [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Updating instance_info_cache with network_info: [{"id": "1408cc61-c46f-4333-bd79-7dec976cea2f", "address": "fa:16:3e:71:13:cf", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1408cc61-c4", "ovs_interfaceid": "1408cc61-c46f-4333-bd79-7dec976cea2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.318704] env[62816]: DEBUG oslo_vmware.rw_handles [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52694d6c-9ce2-15c1-fba5-8e5a8627132c/disk-0.vmdk. 
{{(pid=62816) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2081.318704] env[62816]: INFO nova.virt.vmwareapi.images [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Downloaded image file data 342dc3e0-1510-4bf8-b31a-a9336941a298 [ 2081.318704] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf590d0-316d-44c8-a3ac-298e2f6f2667 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.337485] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72f2236d-6643-4fee-b322-e91f20588dc0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.359512] env[62816]: INFO nova.virt.vmwareapi.images [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] The imported VM was unregistered [ 2081.361897] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Caching image {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2081.362193] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Creating directory with path [datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2081.362534] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da814476-19fb-4a9d-a20f-b84482d311ab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.371594] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Created directory with path [datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298 {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2081.371771] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145/OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145.vmdk to [datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk. 
{{(pid=62816) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2081.372028] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-1d8d170a-737d-467d-846d-7d7d44ddb647 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.377711] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2081.377711] env[62816]: value = "task-1789442" [ 2081.377711] env[62816]: _type = "Task" [ 2081.377711] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.385296] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.419393] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.689s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.432129] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789440, 'name': CreateVM_Task, 'duration_secs': 0.404274} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.432336] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2081.433056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.433163] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.433462] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2081.433729] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-253be234-abf4-427d-a146-be2e98b57154 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.438405] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2081.438405] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f372f0-8200-fc4c-b0e1-8d32de0ba17a" [ 2081.438405] env[62816]: _type = "Task" [ 2081.438405] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.442316] env[62816]: INFO nova.scheduler.client.report [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Deleted allocations for instance 4ab07a21-2685-42bc-af13-b95473993d6f [ 2081.451774] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f372f0-8200-fc4c-b0e1-8d32de0ba17a, 'name': SearchDatastore_Task, 'duration_secs': 0.010539} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.452231] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.452372] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2081.452558] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.453275] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.453275] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2081.453275] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8dd5a4f9-a5bc-4332-bbbd-c3a795a64371 
{{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.460509] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2081.461296] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2081.461692] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f2a84bb-6ccd-43d1-a645-099c584c7d59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.466836] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2081.466836] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5287d6db-e23f-178d-7211-056aea80b504" [ 2081.466836] env[62816]: _type = "Task" [ 2081.466836] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.474406] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5287d6db-e23f-178d-7211-056aea80b504, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.584984] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.584984] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Cleaning up deleted instances with incomplete migration {{(pid=62816) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2081.690826] env[62816]: DEBUG oslo_concurrency.lockutils [req-a4a16da2-ce14-4ffc-97c1-79923f268345 req-e4f36131-9852-426b-aca7-415613dc8283 service nova] Releasing lock "refresh_cache-87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.888463] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task} progress is 12%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.950162] env[62816]: DEBUG oslo_concurrency.lockutils [None req-345196f2-1031-4b04-8b67-1fd2a4c1f1fe tempest-ServerActionsTestOtherA-968988627 tempest-ServerActionsTestOtherA-968988627-project-member] Lock "4ab07a21-2685-42bc-af13-b95473993d6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.032s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.978197] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5287d6db-e23f-178d-7211-056aea80b504, 'name': SearchDatastore_Task, 'duration_secs': 0.009564} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.979036] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74377c2d-ca07-4cbc-860d-87bdfaf5556b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.984545] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2081.984545] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a28c54-4254-7bd6-543e-a265b4f9c855" [ 2081.984545] env[62816]: _type = "Task" [ 2081.984545] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.993813] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a28c54-4254-7bd6-543e-a265b4f9c855, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.087666] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.391665] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.496815] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a28c54-4254-7bd6-543e-a265b4f9c855, 'name': SearchDatastore_Task, 'duration_secs': 0.073258} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.497191] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.497513] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f/87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2082.498015] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a73a602b-d036-4ccb-bbd4-29274c0b77d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.500745] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.501721] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.501721] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.501721] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.501721] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.503674] env[62816]: INFO nova.compute.manager [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Terminating instance [ 2082.505448] env[62816]: DEBUG nova.compute.manager [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2082.505727] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2082.506587] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f194757-f0ca-45c4-a187-d6ae01bdfdfa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.511086] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2082.511086] env[62816]: value = "task-1789443" [ 2082.511086] env[62816]: _type = "Task" [ 2082.511086] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.517120] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2082.517756] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f86746d0-2ba5-4887-85d7-52b176457bde {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.522377] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.526986] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2082.526986] env[62816]: value = "task-1789444" [ 2082.526986] env[62816]: _type = "Task" [ 2082.526986] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.536141] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789444, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.889181] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.977411] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.977692] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.021223] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.038433] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789444, 'name': PowerOffVM_Task, 'duration_secs': 0.293712} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.038713] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2083.038882] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2083.039162] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48a0b539-9cf6-4806-be31-8a5bcd3953b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.165702] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2083.166182] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2083.166438] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] a17c2b1f-47f2-4076-8e99-55e8189e952f {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2083.166738] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b3cf2a6-b78e-4341-a641-611486babeff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.174031] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2083.174031] env[62816]: value = "task-1789446" [ 2083.174031] env[62816]: _type = "Task" [ 2083.174031] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.182680] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789446, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.390024] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task} progress is 77%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.480988] env[62816]: DEBUG nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2083.522429] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.575186] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.575464] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.575849] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.576085] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.576265] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.576416] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2083.576569] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2083.684726] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789446, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.891801] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task} progress is 97%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.006662] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.006977] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.008435] env[62816]: INFO nova.compute.claims [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2084.024015] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.080289] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.185109] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789446, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.390810] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789442, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.678558} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.391097] env[62816]: INFO nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145/OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145.vmdk to [datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk. [ 2084.391357] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Cleaning up location [datastore1] OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2084.391538] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c07710a2-1253-4992-94b0-3f813c50c145 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2084.391799] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-861a02cc-a793-46a2-8be4-86be4640aa77 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.398548] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2084.398548] env[62816]: value = "task-1789447" [ 2084.398548] env[62816]: _type = "Task" [ 2084.398548] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.406473] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789447, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.523956] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.686357] env[62816]: DEBUG oslo_vmware.api [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.087567} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.686609] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2084.686820] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2084.687014] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2084.687196] env[62816]: INFO nova.compute.manager [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Took 2.18 seconds to destroy the instance on the hypervisor. [ 2084.687428] env[62816]: DEBUG oslo.service.loopingcall [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2084.687612] env[62816]: DEBUG nova.compute.manager [-] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2084.687710] env[62816]: DEBUG nova.network.neutron [-] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2084.908500] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789447, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033028} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.908804] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2084.908918] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.909178] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk to [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2084.909425] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4db27ed-c0d6-47f0-92b4-27edd2ab4d83 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.916610] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2084.916610] env[62816]: value = "task-1789448" [ 2084.916610] env[62816]: _type = "Task" [ 2084.916610] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.924084] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789448, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.961978] env[62816]: DEBUG nova.compute.manager [req-e5865e41-cde2-4e2c-bb05-72126936f720 req-71a103b0-0667-4fcf-b9e3-79255460b995 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Received event network-vif-deleted-aa1cc566-fcd4-44bc-a585-ddab737b1a55 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2084.962202] env[62816]: INFO nova.compute.manager [req-e5865e41-cde2-4e2c-bb05-72126936f720 req-71a103b0-0667-4fcf-b9e3-79255460b995 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Neutron deleted interface aa1cc566-fcd4-44bc-a585-ddab737b1a55; detaching it from the instance and deleting it from the info cache [ 2084.962379] env[62816]: DEBUG nova.network.neutron [req-e5865e41-cde2-4e2c-bb05-72126936f720 req-71a103b0-0667-4fcf-b9e3-79255460b995 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.025668] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.078522] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efac5da5-a630-4746-83fa-c28c25a69380 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.085786] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b54eeb4-643f-49db-a2dd-9533a2a88e3f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.114801] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8fcfad-6791-45e4-87db-959eb085df13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.121927] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80de4a1c-4133-4ccb-a94f-04110a551b31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.135600] env[62816]: DEBUG nova.compute.provider_tree [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2085.428016] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': 
task-1789448, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.440555] env[62816]: DEBUG nova.network.neutron [-] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.465304] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-015668a8-5a4d-40c3-bdce-027783792554 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.475578] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be48daa9-bcf7-4620-b278-0f6472635daa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.502371] env[62816]: DEBUG nova.compute.manager [req-e5865e41-cde2-4e2c-bb05-72126936f720 req-71a103b0-0667-4fcf-b9e3-79255460b995 service nova] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Detach interface failed, port_id=aa1cc566-fcd4-44bc-a585-ddab737b1a55, reason: Instance a17c2b1f-47f2-4076-8e99-55e8189e952f could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2085.524021] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789443, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.750085} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.524250] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f/87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2085.524497] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2085.524769] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0dafc2c-1bf6-43b0-89aa-68456d39c4f2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.531719] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2085.531719] env[62816]: value = "task-1789449" [ 2085.531719] env[62816]: _type = "Task" [ 2085.531719] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.540721] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789449, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.656555] env[62816]: ERROR nova.scheduler.client.report [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [req-106900a3-7bdc-4e7d-aa96-50c446172171] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-106900a3-7bdc-4e7d-aa96-50c446172171"}]} [ 2085.675957] env[62816]: DEBUG nova.scheduler.client.report [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2085.690674] env[62816]: DEBUG nova.scheduler.client.report [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2085.690912] env[62816]: DEBUG nova.compute.provider_tree [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2085.702820] env[62816]: DEBUG nova.scheduler.client.report [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2085.722062] env[62816]: DEBUG nova.scheduler.client.report [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2085.787814] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63df5d9d-ef43-41ef-9b79-03869fda852c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.795308] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77170f8a-2af3-4420-b690-e05042acb96c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.824744] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847969f6-7180-4a5d-b1be-76bf8696f260 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.832356] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739426f5-c544-471f-863c-488a2804056b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.847345] env[62816]: DEBUG nova.compute.provider_tree [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2085.926684] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789448, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.943670] env[62816]: INFO nova.compute.manager [-] [instance: a17c2b1f-47f2-4076-8e99-55e8189e952f] Took 1.26 seconds to deallocate network for instance. [ 2086.047110] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789449, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1025} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.047434] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2086.048278] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a555d64-e7c0-43c9-ba67-e71779b106e4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.071996] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f/87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2086.072362] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-049a11c8-413a-4361-8161-2eb250b50e9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.092440] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2086.092440] env[62816]: value = "task-1789450" [ 2086.092440] env[62816]: _type = "Task" [ 2086.092440] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.101679] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789450, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.379094] env[62816]: DEBUG nova.scheduler.client.report [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2086.379402] env[62816]: DEBUG nova.compute.provider_tree [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 160 to 161 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2086.379588] env[62816]: DEBUG nova.compute.provider_tree [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2086.428742] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789448, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.451096] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.603831] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789450, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.884898] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.878s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.885425] env[62816]: DEBUG nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2086.888175] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.808s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.888338] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.888493] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2086.888792] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.438s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.889024] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.891968] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e98ee5f-1280-4d6a-8e01-d14429042da3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.900505] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19cb872b-fa56-44d3-8043-cb3605a4ca04 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.916292] env[62816]: INFO nova.scheduler.client.report [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocations for instance a17c2b1f-47f2-4076-8e99-55e8189e952f [ 2086.918500] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6972c514-b300-48e7-8641-b14d1002af7a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.937478] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa431133-a695-4b07-a446-963409e0d8c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.941087] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789448, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.970823] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180559MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2086.971030] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.971269] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.103993] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789450, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.393230] env[62816]: DEBUG nova.compute.utils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2087.394878] env[62816]: DEBUG nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2087.395122] env[62816]: DEBUG nova.network.neutron [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2087.435486] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789448, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.455029} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.436987] env[62816]: DEBUG nova.policy [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f53618eedbd4be28d440e1cbd81a8fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53b24724dc3344f0b4206a015e34f2e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2087.438600] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f726f6a0-b3a8-457a-affa-81e04ab32311 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "a17c2b1f-47f2-4076-8e99-55e8189e952f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.938s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.439509] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/342dc3e0-1510-4bf8-b31a-a9336941a298/342dc3e0-1510-4bf8-b31a-a9336941a298.vmdk to [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2087.440684] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e70cc1-0b66-437c-9d86-cac24bf71baa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.463563] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2087.464106] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-6a43ed39-9fb4-4f85-ac11-7aeb42afdd0d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.490044] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2087.490044] env[62816]: value = "task-1789451" [ 2087.490044] env[62816]: _type = "Task" [ 2087.490044] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.499445] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789451, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.606496] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789450, 'name': ReconfigVM_Task, 'duration_secs': 1.427113} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.606496] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f/87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2087.606994] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33b01df1-551d-462b-8d8c-eabe4912da78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.613291] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2087.613291] env[62816]: value = "task-1789452" [ 2087.613291] env[62816]: _type = "Task" [ 2087.613291] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.621637] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789452, 'name': Rename_Task} progress is 0%. 
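Annotation: the repeated "progress is N%" entries for task-1789448, task-1789450 and task-1789451 come from a loop that re-reads the vCenter task state until it reports success or error. A simplified, hypothetical sketch of that polling pattern (not oslo.vmware's actual implementation; the TaskInfo stand-in and names are illustrative):

import time

def wait_for_task(read_task_info, interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it finishes.

    read_task_info is any callable returning an object with .state,
    .progress and .error attributes (a stand-in for the TaskInfo that
    the PropertyCollector returns).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = read_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(info.error)
        # Each iteration corresponds to one "... progress is N%." DEBUG line.
        time.sleep(interval)
    raise TimeoutError(f"task did not complete within {timeout:.0f}s")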
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.704167] env[62816]: DEBUG nova.network.neutron [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Successfully created port: 610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2087.899357] env[62816]: DEBUG nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2087.999490] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789451, 'name': ReconfigVM_Task, 'duration_secs': 0.273428} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.999787] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761/5e6be756-2dba-4977-aad2-61c5e97dc761.vmdk or device None with type streamOptimized {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2088.000404] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bce58645-3c7b-4109-b9cb-487fcf0be34b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.002648] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 5e6be756-2dba-4977-aad2-61c5e97dc761 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2088.002790] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2088.002917] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 4b2e9f7f-b090-4547-bdd0-d4516fcc7589 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2088.003128] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2088.003288] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2088.010474] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2088.010474] env[62816]: value = "task-1789453" [ 2088.010474] env[62816]: _type = "Task" [ 2088.010474] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.018834] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789453, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.055562] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525ab178-930b-4b82-a470-00a5e6bacae2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.062932] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3354693-75ad-4a6e-b60d-3a643616506f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.092427] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2584d03-2c18-4eb3-8a37-531543c431f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.099833] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209653d3-db33-4017-bc52-5bb7fd2c8772 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.112635] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2088.121537] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: 
{'id': task-1789452, 'name': Rename_Task, 'duration_secs': 0.151418} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.122396] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2088.122646] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be3adb94-ee20-46ec-867b-0514430a65b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.128660] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2088.128660] env[62816]: value = "task-1789454" [ 2088.128660] env[62816]: _type = "Task" [ 2088.128660] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.136283] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789454, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.520505] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789453, 'name': Rename_Task, 'duration_secs': 0.137439} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.520734] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2088.520989] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9ab22e6d-29b3-4218-8424-157ddb35ad47 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.527738] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2088.527738] env[62816]: value = "task-1789455" [ 2088.527738] env[62816]: _type = "Task" [ 2088.527738] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.535542] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789455, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.638226] env[62816]: DEBUG oslo_vmware.api [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789454, 'name': PowerOnVM_Task, 'duration_secs': 0.455904} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.638558] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2088.638734] env[62816]: INFO nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Took 9.82 seconds to spawn the instance on the hypervisor. [ 2088.638981] env[62816]: DEBUG nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2088.639860] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889727d7-1fc5-4893-a20c-55a4432d81ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.648493] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 161 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2088.648710] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 161 to 162 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2088.648837] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2088.910796] env[62816]: DEBUG nova.compute.manager [None 
req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2088.937863] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2088.938204] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2088.938385] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2088.938580] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2088.938732] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2088.938883] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2088.939106] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2088.939270] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 
tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2088.939438] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2088.939602] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2088.939773] env[62816]: DEBUG nova.virt.hardware [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2088.940992] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d973227c-6556-4ab5-b4d0-ef52b3399cc8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.949311] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ccc00f-2475-45ab-9dbf-29569f414853 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.037906] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789455, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.125838] env[62816]: DEBUG nova.compute.manager [req-89fb680d-8758-456a-b654-4395de964073 req-a03aa33a-c951-4298-9b0b-03604692a4be service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Received event network-vif-plugged-610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2089.126048] env[62816]: DEBUG oslo_concurrency.lockutils [req-89fb680d-8758-456a-b654-4395de964073 req-a03aa33a-c951-4298-9b0b-03604692a4be service nova] Acquiring lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2089.126264] env[62816]: DEBUG oslo_concurrency.lockutils [req-89fb680d-8758-456a-b654-4395de964073 req-a03aa33a-c951-4298-9b0b-03604692a4be service nova] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.126434] env[62816]: DEBUG oslo_concurrency.lockutils [req-89fb680d-8758-456a-b654-4395de964073 req-a03aa33a-c951-4298-9b0b-03604692a4be service nova] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.126640] env[62816]: DEBUG nova.compute.manager [req-89fb680d-8758-456a-b654-4395de964073 req-a03aa33a-c951-4298-9b0b-03604692a4be service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] No waiting events found dispatching network-vif-plugged-610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2089.126833] env[62816]: WARNING nova.compute.manager [req-89fb680d-8758-456a-b654-4395de964073 req-a03aa33a-c951-4298-9b0b-03604692a4be service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Received unexpected event network-vif-plugged-610e9246-a2bd-4611-a6cb-be369b3e41df for instance with vm_state building and task_state spawning. [ 2089.156238] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2089.160014] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.185s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.160014] env[62816]: INFO nova.compute.manager [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Took 17.30 seconds to build instance. 
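Annotation: the "Acquiring lock ... / acquired ... waited Ns / released ... held Ns" triplets throughout this section are emitted by oslo.concurrency's lock wrapper around named critical sections such as "compute_resources". A minimal usage sketch of the same primitive (the decorated function below is illustrative, not the resource tracker's code):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage_example(usage, instance_uuid, ram_mb):
    # Only one green thread in this worker mutates the shared usage
    # accounting at a time; the waited/held durations reported in the
    # DEBUG lines above are measured around this critical section.
    usage[instance_uuid] = usage.get(instance_uuid, 0) + ram_mb
    return usage[instance_uuid]

# Equivalent context-manager form:
# with lockutils.lock('compute_resources'):
#     ...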
[ 2089.208575] env[62816]: DEBUG nova.network.neutron [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Successfully updated port: 610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2089.541334] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789455, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.660855] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d27f8080-28fa-411f-9f6c-b8ca3eaf460c tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.808s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.714113] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2089.714272] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2089.714655] env[62816]: DEBUG nova.network.neutron [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2089.894996] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9c4570-1c78-4f08-b10e-af6b93806418 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.901978] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Suspending the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2089.902217] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-d8048479-03f2-47f6-bbda-4378fadf9fa7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.908176] env[62816]: DEBUG oslo_vmware.api [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2089.908176] env[62816]: value = "task-1789456" [ 
2089.908176] env[62816]: _type = "Task" [ 2089.908176] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.915877] env[62816]: DEBUG oslo_vmware.api [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789456, 'name': SuspendVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.042102] env[62816]: DEBUG oslo_vmware.api [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789455, 'name': PowerOnVM_Task, 'duration_secs': 1.023897} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.042519] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2090.154930] env[62816]: DEBUG nova.compute.manager [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2090.156103] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8858d4b8-2c3c-44dd-93fb-0dd17f3e692a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.248482] env[62816]: DEBUG nova.network.neutron [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2090.392988] env[62816]: DEBUG nova.network.neutron [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [{"id": "610e9246-a2bd-4611-a6cb-be369b3e41df", "address": "fa:16:3e:94:f3:a3", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap610e9246-a2", "ovs_interfaceid": "610e9246-a2bd-4611-a6cb-be369b3e41df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.417913] env[62816]: DEBUG oslo_vmware.api [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789456, 'name': SuspendVM_Task} progress is 70%. 
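Annotation: the network_info list logged above is the instance's cached VIF description (port id, MAC, subnets, OVS/NSX details). A hypothetical helper that pulls the commonly needed fields out of one such entry (key names copied from the logged structure; this is not a Nova API):

def summarize_vif(vif):
    """Return the fields of interest from one network_info entry."""
    ips = [
        ip['address']
        for subnet in vif['network']['subnets']
        for ip in subnet['ips']
    ]
    return {
        'port_id': vif['id'],                            # 610e9246-a2bd-...
        'mac': vif['address'],                           # fa:16:3e:94:f3:a3
        'ips': ips,                                      # ['192.168.128.11']
        'devname': vif.get('devname'),                   # tap610e9246-a2
        'segmentation_id': vif['details'].get('segmentation_id'),
    }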
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.673097] env[62816]: DEBUG oslo_concurrency.lockutils [None req-54257b3c-58be-4f0c-95fa-5ab06afc7b1b tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 21.963s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.895685] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.896078] env[62816]: DEBUG nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Instance network_info: |[{"id": "610e9246-a2bd-4611-a6cb-be369b3e41df", "address": "fa:16:3e:94:f3:a3", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap610e9246-a2", "ovs_interfaceid": "610e9246-a2bd-4611-a6cb-be369b3e41df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2090.896533] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:f3:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '610e9246-a2bd-4611-a6cb-be369b3e41df', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2090.904060] env[62816]: DEBUG oslo.service.loopingcall [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2090.904295] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2090.904521] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3941691e-c226-46b8-9ad4-346f4ab35869 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.927262] env[62816]: DEBUG oslo_vmware.api [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789456, 'name': SuspendVM_Task, 'duration_secs': 0.615603} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.928475] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Suspended the VM {{(pid=62816) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2090.928706] env[62816]: DEBUG nova.compute.manager [None req-e3ed5804-2a68-4cca-8ea9-1bb3858250ff tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2090.928974] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2090.928974] env[62816]: value = "task-1789457" [ 2090.928974] env[62816]: _type = "Task" [ 2090.928974] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.929683] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46fe6d0-a2ea-4d0b-9902-f54165cf5873 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.939184] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789457, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.155773] env[62816]: DEBUG nova.compute.manager [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Received event network-changed-610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2091.156169] env[62816]: DEBUG nova.compute.manager [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Refreshing instance network info cache due to event network-changed-610e9246-a2bd-4611-a6cb-be369b3e41df. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2091.156431] env[62816]: DEBUG oslo_concurrency.lockutils [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] Acquiring lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.156582] env[62816]: DEBUG oslo_concurrency.lockutils [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] Acquired lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.156756] env[62816]: DEBUG nova.network.neutron [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Refreshing network info cache for port 610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2091.441574] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789457, 'name': CreateVM_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.842044] env[62816]: DEBUG nova.network.neutron [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updated VIF entry in instance network info cache for port 610e9246-a2bd-4611-a6cb-be369b3e41df. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2091.842433] env[62816]: DEBUG nova.network.neutron [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [{"id": "610e9246-a2bd-4611-a6cb-be369b3e41df", "address": "fa:16:3e:94:f3:a3", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap610e9246-a2", "ovs_interfaceid": "610e9246-a2bd-4611-a6cb-be369b3e41df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.926579] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.926868] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.927095] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.927286] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.927456] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.929445] env[62816]: INFO nova.compute.manager [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Terminating instance [ 2091.931085] env[62816]: DEBUG nova.compute.manager [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2091.931286] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2091.932144] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a3dd24-9d35-4453-b6c7-a42abf24dfd8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.943155] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789457, 'name': CreateVM_Task, 'duration_secs': 0.574826} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.945045] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2091.945315] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2091.945895] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.946069] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.946379] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2091.946592] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2720ac2b-8d46-4f3a-8dbe-b95fdd602bfb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.947878] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78691940-fff5-4a8b-8b90-fe1c2383bb4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.952421] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2091.952421] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cb88fd-96a9-61c2-0f5d-d1096a7aab31" [ 2091.952421] env[62816]: _type = "Task" [ 2091.952421] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.956228] env[62816]: DEBUG oslo_vmware.api [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2091.956228] env[62816]: value = "task-1789458" [ 2091.956228] env[62816]: _type = "Task" [ 2091.956228] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.962714] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb88fd-96a9-61c2-0f5d-d1096a7aab31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.968032] env[62816]: DEBUG oslo_vmware.api [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789458, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.281521] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.281842] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.282042] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2092.282242] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.282417] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.284584] env[62816]: INFO nova.compute.manager [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Terminating instance [ 2092.286334] env[62816]: DEBUG nova.compute.manager [None 
req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2092.286527] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2092.287390] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d6778c0-f3b1-4ea6-bbb5-5cc5d9a91432 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.295023] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2092.295249] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6eb81ad8-fa02-45dc-b0bf-2d033face55e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.344883] env[62816]: DEBUG oslo_concurrency.lockutils [req-2adc48ca-5216-46b9-9b2f-4dd504009d9d req-64900b16-1191-443e-bfe4-1d472bc55e82 service nova] Releasing lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.375500] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2092.375755] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2092.375952] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleting the datastore file [datastore1] 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2092.376234] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b62e319-7dfd-4207-a74f-5353bb4c4266 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.382018] env[62816]: DEBUG oslo_vmware.api [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2092.382018] env[62816]: value = "task-1789460" [ 
2092.382018] env[62816]: _type = "Task" [ 2092.382018] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.389540] env[62816]: DEBUG oslo_vmware.api [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789460, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.463909] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb88fd-96a9-61c2-0f5d-d1096a7aab31, 'name': SearchDatastore_Task, 'duration_secs': 0.010386} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.464549] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.464785] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2092.465034] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.465188] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.465365] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2092.465610] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-38018f38-2d46-4f14-ab5d-ec67fa5c992b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.469887] env[62816]: DEBUG oslo_vmware.api [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 
tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789458, 'name': PowerOffVM_Task, 'duration_secs': 0.16955} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.470462] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2092.470636] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2092.470889] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d814523-5a7b-435e-ba94-b9ab2d55558f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.475941] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2092.476128] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2092.476791] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38e3c33a-1bbc-4a7a-a334-ce91832b963c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.481843] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2092.481843] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52076090-d4a2-6a3d-46fb-270846d4fe71" [ 2092.481843] env[62816]: _type = "Task" [ 2092.481843] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.488728] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52076090-d4a2-6a3d-46fb-270846d4fe71, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.542554] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2092.542778] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2092.542960] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleting the datastore file [datastore1] 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2092.543243] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-52fcd6d7-a89a-4802-80b3-3bccb994bdf8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.549897] env[62816]: DEBUG oslo_vmware.api [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for the task: (returnval){ [ 2092.549897] env[62816]: value = "task-1789462" [ 2092.549897] env[62816]: _type = "Task" [ 2092.549897] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.559767] env[62816]: DEBUG oslo_vmware.api [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789462, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.895448] env[62816]: DEBUG oslo_vmware.api [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137138} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.895857] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2092.896183] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2092.896493] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2092.896807] env[62816]: INFO nova.compute.manager [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2092.897210] env[62816]: DEBUG oslo.service.loopingcall [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2092.897509] env[62816]: DEBUG nova.compute.manager [-] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2092.897673] env[62816]: DEBUG nova.network.neutron [-] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2092.992085] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52076090-d4a2-6a3d-46fb-270846d4fe71, 'name': SearchDatastore_Task, 'duration_secs': 0.00763} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.992850] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94f8e7e0-894c-4716-9c74-3cc792451f6d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.997800] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2092.997800] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5295e129-1914-9546-bd0e-023a6e0fb88f" [ 2092.997800] env[62816]: _type = "Task" [ 2092.997800] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.005345] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5295e129-1914-9546-bd0e-023a6e0fb88f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.059558] env[62816]: DEBUG oslo_vmware.api [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Task: {'id': task-1789462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154213} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.059863] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2093.060069] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2093.060248] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2093.060422] env[62816]: INFO nova.compute.manager [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2093.060657] env[62816]: DEBUG oslo.service.loopingcall [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2093.060849] env[62816]: DEBUG nova.compute.manager [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2093.060942] env[62816]: DEBUG nova.network.neutron [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2093.191142] env[62816]: DEBUG nova.compute.manager [req-d3baf079-c45a-471d-91be-4b09c0385e06 req-eeb56506-bbfe-492e-a179-e5fe04168766 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Received event network-vif-deleted-1408cc61-c46f-4333-bd79-7dec976cea2f {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2093.191382] env[62816]: INFO nova.compute.manager [req-d3baf079-c45a-471d-91be-4b09c0385e06 req-eeb56506-bbfe-492e-a179-e5fe04168766 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Neutron deleted interface 1408cc61-c46f-4333-bd79-7dec976cea2f; detaching it from the instance and deleting it from the info cache [ 2093.191595] env[62816]: DEBUG nova.network.neutron [req-d3baf079-c45a-471d-91be-4b09c0385e06 req-eeb56506-bbfe-492e-a179-e5fe04168766 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.511811] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5295e129-1914-9546-bd0e-023a6e0fb88f, 'name': SearchDatastore_Task, 'duration_secs': 0.00926} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.513489] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.513821] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589/4b2e9f7f-b090-4547-bdd0-d4516fcc7589.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2093.515308] env[62816]: DEBUG nova.compute.manager [req-803cac53-9df7-4f62-b2fa-ae8fecab6689 req-8a46909c-7ee1-429b-b633-f5d6441bb288 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Received event network-vif-deleted-eadfcc8c-606b-4352-8ce4-4ad681cc07c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2093.515512] env[62816]: INFO nova.compute.manager [req-803cac53-9df7-4f62-b2fa-ae8fecab6689 req-8a46909c-7ee1-429b-b633-f5d6441bb288 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Neutron deleted interface eadfcc8c-606b-4352-8ce4-4ad681cc07c6; detaching it from the instance and deleting it from the info cache [ 2093.515715] env[62816]: DEBUG nova.network.neutron [req-803cac53-9df7-4f62-b2fa-ae8fecab6689 req-8a46909c-7ee1-429b-b633-f5d6441bb288 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.517125] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72a8cda5-1469-4d53-b80f-9ce23e621fdb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.526317] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2093.526317] env[62816]: value = "task-1789463" [ 2093.526317] env[62816]: _type = "Task" [ 2093.526317] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.535996] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789463, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.664878] env[62816]: DEBUG nova.network.neutron [-] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.696067] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a70b91e9-4255-4e75-b4aa-9633316572e7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.706304] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba07cd6-8eef-43c4-a62f-0a2af1fd4663 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.735074] env[62816]: DEBUG nova.compute.manager [req-d3baf079-c45a-471d-91be-4b09c0385e06 req-eeb56506-bbfe-492e-a179-e5fe04168766 service nova] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Detach interface failed, port_id=1408cc61-c46f-4333-bd79-7dec976cea2f, reason: Instance 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2093.979079] env[62816]: DEBUG nova.network.neutron [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.022891] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c80d773a-5cb6-4f92-aa9e-bfd43a0da4fe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.035496] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c9a2ca-0a99-45af-827e-20d0cfc9d046 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.049451] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789463, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.065154] env[62816]: DEBUG nova.compute.manager [req-803cac53-9df7-4f62-b2fa-ae8fecab6689 req-8a46909c-7ee1-429b-b633-f5d6441bb288 service nova] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Detach interface failed, port_id=eadfcc8c-606b-4352-8ce4-4ad681cc07c6, reason: Instance 5e6be756-2dba-4977-aad2-61c5e97dc761 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2094.172065] env[62816]: INFO nova.compute.manager [-] [instance: 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f] Took 1.27 seconds to deallocate network for instance. [ 2094.482735] env[62816]: INFO nova.compute.manager [-] [instance: 5e6be756-2dba-4977-aad2-61c5e97dc761] Took 1.42 seconds to deallocate network for instance. [ 2094.537125] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789463, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.678391] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.678660] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.678884] env[62816]: DEBUG nova.objects.instance [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'resources' on Instance uuid 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2094.989962] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.037422] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789463, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.231454] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2105a6-0187-4b52-b0c7-d0c926e4a1b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.239446] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6d9aff-1c08-4cc1-aa3c-d1b4f4e11ea0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.269780] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cb8c8e-5280-437c-af12-534f381a5eeb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.277273] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-155edbf1-fee1-4073-a38a-eb9b6242d990 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.290254] env[62816]: DEBUG nova.compute.provider_tree [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2095.538792] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789463, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.513625} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.539142] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589/4b2e9f7f-b090-4547-bdd0-d4516fcc7589.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2095.539244] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2095.539712] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-189c3bda-30ea-4221-b0ab-46b4c71b2cfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.546043] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2095.546043] env[62816]: value = "task-1789464" [ 2095.546043] env[62816]: _type = "Task" [ 2095.546043] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.553349] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789464, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.821197] env[62816]: DEBUG nova.scheduler.client.report [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 162 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2095.821482] env[62816]: DEBUG nova.compute.provider_tree [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 162 to 163 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2095.821666] env[62816]: DEBUG nova.compute.provider_tree [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2096.055274] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070497} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.055485] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2096.056291] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca17329c-1b36-4388-a215-7215fa780adf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.077591] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589/4b2e9f7f-b090-4547-bdd0-d4516fcc7589.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2096.077828] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b67022ee-49c1-43cd-be2b-c0326b342623 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.097125] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2096.097125] env[62816]: value = "task-1789465" [ 2096.097125] env[62816]: _type = "Task" [ 2096.097125] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.104347] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789465, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.326683] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.329408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.340s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.329743] env[62816]: DEBUG nova.objects.instance [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lazy-loading 'resources' on Instance uuid 5e6be756-2dba-4977-aad2-61c5e97dc761 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.345474] env[62816]: INFO nova.scheduler.client.report [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocations for instance 87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f [ 2096.608691] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789465, 'name': ReconfigVM_Task, 'duration_secs': 0.263837} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.609100] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589/4b2e9f7f-b090-4547-bdd0-d4516fcc7589.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2096.609514] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cfe3c6c6-974e-467b-b267-74cf299066ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.616294] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2096.616294] env[62816]: value = "task-1789466" [ 2096.616294] env[62816]: _type = "Task" [ 2096.616294] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.623875] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789466, 'name': Rename_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.853321] env[62816]: DEBUG oslo_concurrency.lockutils [None req-fb56dc76-5379-4a8a-bb50-61fd312a897f tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "87d309e5-4f5d-4ce8-bd34-d9ad8711ac2f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.571s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.874818] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805096c4-d606-458a-bcd9-8b69c25f0dc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.882090] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bffe02-1dc5-4dc9-8c75-6fd5165d47e9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.911767] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3becba5a-d288-45d7-b34b-968bafbd3376 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.918729] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc3bfc3-77c4-4524-b88b-a75db78eb3b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.931764] env[62816]: DEBUG nova.compute.provider_tree [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2097.127021] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789466, 'name': Rename_Task, 'duration_secs': 0.139712} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.127255] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2097.127500] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f0c0b565-1fd8-48e2-aba8-73e6d9ff985e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.133385] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2097.133385] env[62816]: value = "task-1789467" [ 2097.133385] env[62816]: _type = "Task" [ 2097.133385] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.142583] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789467, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.434961] env[62816]: DEBUG nova.scheduler.client.report [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2097.644817] env[62816]: DEBUG oslo_vmware.api [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789467, 'name': PowerOnVM_Task, 'duration_secs': 0.436466} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.645106] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2097.646007] env[62816]: INFO nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Took 8.73 seconds to spawn the instance on the hypervisor. 
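Every vCenter operation traced in this log follows the same shape: the driver issues an asynchronous vSphere method (PowerOffVM_Task, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) through its oslo.vmware session and then blocks in wait_for_task, which is what produces the repeated "Task: {...} progress is N%" polling lines. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession (`session`) and a VirtualMachine managed-object reference (`vm_ref`) supplied by the caller; the helper name is illustrative, not Nova's:

def power_on_and_wait(session, vm_ref):
    # session: an established oslo_vmware.api.VMwareAPISession (assumed here);
    # vm_ref: the VirtualMachine managed-object reference (assumed here).
    # invoke_api() issues the SOAP call -- the "Invoking
    # VirtualMachine.PowerOnVM_Task" entries above -- and returns a Task moref.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the TaskInfo until the task succeeds or raises on
    # error, which is where the periodic "progress is N%" DEBUG lines come from.
    return session.wait_for_task(task)

The same two-step call-then-wait sketch applies to the datastore file deletes and virtual-disk copies seen earlier in this section; only the invoked method name and its arguments change.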
[ 2097.646007] env[62816]: DEBUG nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2097.646320] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6308d92b-cd9f-4717-b454-51142228f96a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.941028] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.960149] env[62816]: INFO nova.scheduler.client.report [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Deleted allocations for instance 5e6be756-2dba-4977-aad2-61c5e97dc761 [ 2098.163551] env[62816]: INFO nova.compute.manager [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Took 14.18 seconds to build instance. [ 2098.169255] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "0eea9110-9194-4d75-b9af-ba386d96c129" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.169489] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.468214] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f756e8e1-d798-476d-b264-76d319b110c9 tempest-ServerActionsTestOtherB-1107504888 tempest-ServerActionsTestOtherB-1107504888-project-member] Lock "5e6be756-2dba-4977-aad2-61c5e97dc761" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.541s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.665752] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d5b2d1ea-35f7-4e1a-8278-077deccef757 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.688s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.672301] env[62816]: DEBUG 
nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2099.194082] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.194394] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.195993] env[62816]: INFO nova.compute.claims [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2099.203104] env[62816]: DEBUG nova.compute.manager [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2099.719831] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.256396] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34e23e1-1a54-4d53-92c7-54564a5d187f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.264102] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc017bf4-622b-4f23-b8c4-26873ec366dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.295023] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115b650c-5269-4f47-bfc9-da4681e8a2d1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.302338] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763a1f30-b0be-44be-aacd-7b5c905afd15 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.315357] env[62816]: DEBUG nova.compute.provider_tree [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] 
Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2100.818903] env[62816]: DEBUG nova.scheduler.client.report [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2101.326105] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.326822] env[62816]: DEBUG nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2101.330115] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.610s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.832873] env[62816]: DEBUG nova.compute.utils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2101.834328] env[62816]: DEBUG nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2101.834501] env[62816]: DEBUG nova.network.neutron [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2101.838597] env[62816]: INFO nova.compute.claims [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2101.885042] env[62816]: DEBUG nova.policy [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59c5023a18d243b7aafb6d6181f931d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47e070d1729247ff83b4ff6997b45385', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2102.132499] env[62816]: DEBUG nova.network.neutron [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Successfully created port: 9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2102.337525] env[62816]: DEBUG nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2102.344161] env[62816]: INFO nova.compute.resource_tracker [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating resource usage from migration 6821d8cd-c2a8-4847-abba-8663647930d3 [ 2102.402398] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d318b584-68b2-4bf3-8700-6b6864a4363e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.411658] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5a30ca-1cac-41a6-9426-e8521d48c7c4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.442814] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e43b55e-cbf6-4360-84df-0247b2aa9138 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.449864] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf81c64-111b-4367-a0d5-7b9c2f8c4f78 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.463058] env[62816]: DEBUG nova.compute.provider_tree [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2102.984335] env[62816]: ERROR nova.scheduler.client.report [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [req-ece9245a-2a8b-495b-8e28-e4ead4fbe52a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ece9245a-2a8b-495b-8e28-e4ead4fbe52a"}]} [ 2103.000786] env[62816]: DEBUG nova.scheduler.client.report [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2103.015051] env[62816]: DEBUG nova.scheduler.client.report [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2103.015335] env[62816]: DEBUG nova.compute.provider_tree [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.025904] env[62816]: DEBUG nova.scheduler.client.report [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2103.042885] env[62816]: DEBUG nova.scheduler.client.report [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2103.085835] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-664a56dd-15ac-447b-a3e2-c4ba73e12929 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.093263] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b9661ae0-4328-4517-ac45-c62a7abc8e79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.123575] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b7f3c2-0d41-40b5-86fe-9036a0992a31 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.130834] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f2c372-3318-4793-93b9-a244c7129f02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.144189] env[62816]: DEBUG nova.compute.provider_tree [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.353435] env[62816]: DEBUG nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2103.378756] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2103.379624] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2103.379624] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2103.379624] env[62816]: 
DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2103.379922] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2103.380162] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2103.380478] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2103.380730] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2103.380998] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2103.381254] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2103.381489] env[62816]: DEBUG nova.virt.hardware [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2103.382393] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8aaf733-4528-4974-8fee-61ac676397ee {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.390572] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c392e6-7ea4-4c98-8db4-7f7cf881da39 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.617131] env[62816]: DEBUG nova.compute.manager [req-5ce841fa-6274-4503-98eb-52f3586a3689 req-67367d00-086a-4d4d-9a18-32d11864f6ac service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Received event network-vif-plugged-9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2103.617428] env[62816]: DEBUG oslo_concurrency.lockutils [req-5ce841fa-6274-4503-98eb-52f3586a3689 req-67367d00-086a-4d4d-9a18-32d11864f6ac service nova] Acquiring lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.617735] env[62816]: DEBUG oslo_concurrency.lockutils [req-5ce841fa-6274-4503-98eb-52f3586a3689 req-67367d00-086a-4d4d-9a18-32d11864f6ac service nova] Lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.618013] env[62816]: DEBUG oslo_concurrency.lockutils [req-5ce841fa-6274-4503-98eb-52f3586a3689 req-67367d00-086a-4d4d-9a18-32d11864f6ac service nova] Lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.618286] env[62816]: DEBUG nova.compute.manager [req-5ce841fa-6274-4503-98eb-52f3586a3689 req-67367d00-086a-4d4d-9a18-32d11864f6ac service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] No waiting events found dispatching network-vif-plugged-9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2103.618542] env[62816]: WARNING nova.compute.manager [req-5ce841fa-6274-4503-98eb-52f3586a3689 req-67367d00-086a-4d4d-9a18-32d11864f6ac service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Received unexpected event network-vif-plugged-9660e770-0412-4b45-a580-9b940740fcd1 for instance with vm_state building and task_state spawning. 
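[editor's illustration] The entries around timestamps 2102.98-2103.68 show the resize claim hitting a placement generation conflict: the PUT of the updated DISK_GB inventory fails with 409 "placement.concurrent_update", the report client refreshes the provider's inventories, aggregates and traits, and then retries the update, which succeeds with the generation moving from 166 to 167. A minimal, self-contained sketch of that optimistic-concurrency pattern against the Placement API follows. It is not Nova's report client; the endpoint URL, headers, and retry count are assumptions made purely for illustration.

    # Sketch only: retry an inventory PUT when another writer bumps the
    # resource provider generation between our read and our write.
    import requests

    PLACEMENT = "http://placement.example/placement"      # assumed endpoint
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}  # auth token omitted

    def set_inventory(rp_uuid, inventories, max_retries=3):
        url = f"{PLACEMENT}/resource_providers/{rp_uuid}"
        for _ in range(max_retries):
            # Read the provider's current generation.
            generation = requests.get(url, headers=HEADERS).json()["generation"]
            payload = {
                "resource_provider_generation": generation,
                "inventories": inventories,
            }
            resp = requests.put(f"{url}/inventories", json=payload, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: someone else updated the provider
            # first; loop, re-read the generation, and try again.
        raise RuntimeError("gave up after repeated generation conflicts")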
[ 2103.682505] env[62816]: DEBUG nova.scheduler.client.report [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 166 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2103.682767] env[62816]: DEBUG nova.compute.provider_tree [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 166 to 167 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2103.682949] env[62816]: DEBUG nova.compute.provider_tree [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2103.696960] env[62816]: DEBUG nova.network.neutron [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Successfully updated port: 9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2104.191412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.861s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2104.191677] env[62816]: INFO nova.compute.manager [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Migrating [ 2104.199995] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.200155] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd 
tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.200302] env[62816]: DEBUG nova.network.neutron [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2104.711375] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.711550] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.711723] env[62816]: DEBUG nova.network.neutron [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2104.747521] env[62816]: DEBUG nova.network.neutron [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2104.879343] env[62816]: DEBUG nova.network.neutron [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [{"id": "9660e770-0412-4b45-a580-9b940740fcd1", "address": "fa:16:3e:bf:d0:c3", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9660e770-04", "ovs_interfaceid": "9660e770-0412-4b45-a580-9b940740fcd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.382062] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.382369] env[62816]: DEBUG nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Instance network_info: |[{"id": "9660e770-0412-4b45-a580-9b940740fcd1", "address": "fa:16:3e:bf:d0:c3", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9660e770-04", "ovs_interfaceid": "9660e770-0412-4b45-a580-9b940740fcd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 2105.382848] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:d0:c3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '15ff34f9-4b02-4be1-b433-3ec4bd1b37c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9660e770-0412-4b45-a580-9b940740fcd1', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2105.390632] env[62816]: DEBUG oslo.service.loopingcall [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2105.390865] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2105.391133] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9671b7f-8257-47fc-b816-f3e16e6ce4c7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.413663] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2105.413663] env[62816]: value = "task-1789469" [ 2105.413663] env[62816]: _type = "Task" [ 2105.413663] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.421548] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789469, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.442071] env[62816]: DEBUG nova.network.neutron [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [{"id": "610e9246-a2bd-4611-a6cb-be369b3e41df", "address": "fa:16:3e:94:f3:a3", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap610e9246-a2", "ovs_interfaceid": "610e9246-a2bd-4611-a6cb-be369b3e41df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.650393] env[62816]: DEBUG nova.compute.manager [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Received event network-changed-9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2105.650605] env[62816]: DEBUG nova.compute.manager [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Refreshing instance network info cache due to event network-changed-9660e770-0412-4b45-a580-9b940740fcd1. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2105.650835] env[62816]: DEBUG oslo_concurrency.lockutils [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] Acquiring lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.650979] env[62816]: DEBUG oslo_concurrency.lockutils [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] Acquired lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.651153] env[62816]: DEBUG nova.network.neutron [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Refreshing network info cache for port 9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2105.923624] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789469, 'name': CreateVM_Task, 'duration_secs': 0.332382} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.923806] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2105.924477] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.924641] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.924960] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2105.925226] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4cc869a-1d50-4026-bf80-b9420e81a0ec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.929934] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2105.929934] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520f73ac-7c4a-fda3-1edd-c44978849335" [ 2105.929934] env[62816]: _type = "Task" [ 2105.929934] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.937326] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520f73ac-7c4a-fda3-1edd-c44978849335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.944956] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.363405] env[62816]: DEBUG nova.network.neutron [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updated VIF entry in instance network info cache for port 9660e770-0412-4b45-a580-9b940740fcd1. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2106.363849] env[62816]: DEBUG nova.network.neutron [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [{"id": "9660e770-0412-4b45-a580-9b940740fcd1", "address": "fa:16:3e:bf:d0:c3", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9660e770-04", "ovs_interfaceid": "9660e770-0412-4b45-a580-9b940740fcd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2106.441263] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520f73ac-7c4a-fda3-1edd-c44978849335, 'name': SearchDatastore_Task, 'duration_secs': 0.011133} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.441583] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.442546] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2106.442820] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2106.442972] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2106.443174] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2106.443498] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e034887b-0c13-4ddd-bebc-e01342b92ea2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.453626] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2106.453626] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2106.453866] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dfaba05-894b-4dca-8aa0-0d29b3057f3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.459409] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2106.459409] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5229ec3e-d6cd-f36a-6903-cb820b881766" [ 2106.459409] env[62816]: _type = "Task" [ 2106.459409] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.467430] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5229ec3e-d6cd-f36a-6903-cb820b881766, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.867081] env[62816]: DEBUG oslo_concurrency.lockutils [req-5efd2fe1-e45e-4c6c-b070-a84d9cda72f4 req-e12b878b-1c0c-4f60-b5ec-4bc2db0cafe3 service nova] Releasing lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.969492] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5229ec3e-d6cd-f36a-6903-cb820b881766, 'name': SearchDatastore_Task, 'duration_secs': 0.009117} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.970271] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c99051-c0d3-47fa-8493-485bd5c306e5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.976497] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2106.976497] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d44eb9-1700-ebe7-1814-f7bf42e190ce" [ 2106.976497] env[62816]: _type = "Task" [ 2106.976497] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.985425] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d44eb9-1700-ebe7-1814-f7bf42e190ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.463258] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d6f0cc-c8ab-4d7e-a15e-eefd9440464b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.481725] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2107.494255] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d44eb9-1700-ebe7-1814-f7bf42e190ce, 'name': SearchDatastore_Task, 'duration_secs': 0.009851} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.494502] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2107.494798] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129/0eea9110-9194-4d75-b9af-ba386d96c129.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2107.495075] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-799d3471-2af9-4fd8-8cd0-2ecc03a577e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.501228] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2107.501228] env[62816]: value = "task-1789470" [ 2107.501228] env[62816]: _type = "Task" [ 2107.501228] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.509037] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789470, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.542884] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "30884afd-63d4-4a08-a59a-a9dcb4269dba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.543165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2107.990295] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2107.990626] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6f4a4fa-329b-4971-8d47-91bb5850ad9c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.997522] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2107.997522] env[62816]: value = "task-1789471" [ 2107.997522] env[62816]: _type = "Task" [ 2107.997522] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.008473] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789471, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.013663] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472584} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.014056] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129/0eea9110-9194-4d75-b9af-ba386d96c129.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2108.014203] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2108.014445] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94aa0035-2717-4a29-bdf6-3ae71aea3d64 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.020720] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2108.020720] env[62816]: value = "task-1789472" [ 2108.020720] env[62816]: _type = "Task" [ 2108.020720] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.029404] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.045666] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2108.507579] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789471, 'name': PowerOffVM_Task, 'duration_secs': 0.209161} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.507992] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2108.508084] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2108.529093] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06475} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.530399] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2108.530399] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481649ae-6839-438c-b8bf-c7942116e43f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.552134] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129/0eea9110-9194-4d75-b9af-ba386d96c129.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2108.554355] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4f41c84-d726-4d1b-b978-e81a06ebc18d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.576788] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2108.576788] env[62816]: value = "task-1789473" [ 2108.576788] env[62816]: _type = "Task" [ 2108.576788] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.584676] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789473, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.585889] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.586228] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2108.587744] env[62816]: INFO nova.compute.claims [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2109.015167] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2109.015442] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2109.015636] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2109.015837] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2109.015987] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2109.016200] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2109.016427] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2109.016602] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2109.016795] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2109.016969] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2109.017162] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2109.022127] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c38fabfe-2395-4627-bc3e-0dce48686343 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.037843] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2109.037843] env[62816]: value = "task-1789474" [ 2109.037843] env[62816]: _type = "Task" [ 2109.037843] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.045624] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789474, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.086493] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789473, 'name': ReconfigVM_Task, 'duration_secs': 0.274085} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.087292] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129/0eea9110-9194-4d75-b9af-ba386d96c129.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2109.087665] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c36bcd39-9f85-445f-91df-7a8946aa4c3c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.094616] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2109.094616] env[62816]: value = "task-1789475" [ 2109.094616] env[62816]: _type = "Task" [ 2109.094616] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.102468] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789475, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.548074] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789474, 'name': ReconfigVM_Task, 'duration_secs': 0.396651} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.548412] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2109.607743] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789475, 'name': Rename_Task, 'duration_secs': 0.240943} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.608068] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2109.608314] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b62a4cd1-a372-4302-a7f1-15da7331f547 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.614863] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2109.614863] env[62816]: value = "task-1789476" [ 2109.614863] env[62816]: _type = "Task" [ 2109.614863] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2109.622242] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789476, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.654578] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de284ad1-c3bd-40e9-8628-9fd1f2e8169e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.663325] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4408e708-fd74-4565-9cc0-5d0dcbec1ecf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.692460] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549bd5ab-35b8-43f7-a268-1bfebc5cc7cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.699592] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86587c35-230e-441d-84c0-7d7675f55bab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.712437] env[62816]: DEBUG nova.compute.provider_tree [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.055526] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) 
and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2110.055874] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2110.056074] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2110.056282] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2110.056490] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2110.056741] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2110.057009] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2110.057138] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2110.057339] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2110.057517] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2110.057714] env[62816]: DEBUG nova.virt.hardware [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 
tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2110.063697] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2110.064015] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bc9c5259-ba1c-45f9-a10f-18dc3e6c537d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.081938] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2110.081938] env[62816]: value = "task-1789477" [ 2110.081938] env[62816]: _type = "Task" [ 2110.081938] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.090525] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789477, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.124859] env[62816]: DEBUG oslo_vmware.api [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789476, 'name': PowerOnVM_Task, 'duration_secs': 0.444719} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.125165] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2110.125370] env[62816]: INFO nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Took 6.77 seconds to spawn the instance on the hypervisor. 
[ 2110.125573] env[62816]: DEBUG nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2110.126345] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-879d6a02-5670-429b-a82c-7b9a5ea34b71 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.215933] env[62816]: DEBUG nova.scheduler.client.report [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2110.593872] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789477, 'name': ReconfigVM_Task, 'duration_secs': 0.158659} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.594245] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2110.594934] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352d2ad5-4c56-45e1-915f-6262c8fb543e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.616781] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589/4b2e9f7f-b090-4547-bdd0-d4516fcc7589.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2110.617065] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c14af2a2-e451-473c-b0c8-0249f870e986 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.634112] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2110.634112] env[62816]: value = "task-1789478" [ 2110.634112] env[62816]: 
_type = "Task" [ 2110.634112] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.645727] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789478, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.646165] env[62816]: INFO nova.compute.manager [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Took 11.47 seconds to build instance. [ 2110.720704] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.134s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.721279] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2111.144043] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789478, 'name': ReconfigVM_Task, 'duration_secs': 0.26361} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.144356] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589/4b2e9f7f-b090-4547-bdd0-d4516fcc7589.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2111.144635] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2111.148067] env[62816]: DEBUG oslo_concurrency.lockutils [None req-855ada83-8542-41bb-a093-6d566a2e89cd tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.978s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.226982] env[62816]: DEBUG nova.compute.utils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2111.228272] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2111.228401] env[62816]: DEBUG nova.network.neutron [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2111.267352] env[62816]: DEBUG nova.policy [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2111.591141] env[62816]: DEBUG nova.network.neutron [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Successfully created port: 465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2111.651675] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb0d2db-74f4-4e8c-bfaf-76dd23a767b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.681540] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7decaa9-5b25-4aca-91dd-6a6202a409c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.700220] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2111.731872] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2112.154811] env[62816]: DEBUG nova.compute.manager [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Stashing vm_state: active {{(pid=62816) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 2112.238814] env[62816]: DEBUG nova.network.neutron [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Port 610e9246-a2bd-4611-a6cb-be369b3e41df binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2112.675391] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.675711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.741616] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2112.774553] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2112.774940] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2112.775154] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2112.775411] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2112.775607] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2112.775780] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2112.776015] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2112.776185] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2112.776355] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2112.776521] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2112.776699] env[62816]: DEBUG nova.virt.hardware [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2112.777652] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e1ac72-77e0-4882-9e6c-955a72e604aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2112.786579] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8237f1f-22f7-4988-bb4a-4a213fa88581 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.181831] env[62816]: INFO nova.compute.claims [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2113.192453] env[62816]: DEBUG nova.compute.manager [req-928bbb3c-c8c5-47dd-9151-f5806ebc3523 req-8765c239-6d70-4cbb-a87b-6430eb71aa7b service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Received event network-vif-plugged-465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2113.192686] env[62816]: DEBUG oslo_concurrency.lockutils [req-928bbb3c-c8c5-47dd-9151-f5806ebc3523 req-8765c239-6d70-4cbb-a87b-6430eb71aa7b service nova] Acquiring lock "30884afd-63d4-4a08-a59a-a9dcb4269dba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.192893] env[62816]: DEBUG oslo_concurrency.lockutils [req-928bbb3c-c8c5-47dd-9151-f5806ebc3523 req-8765c239-6d70-4cbb-a87b-6430eb71aa7b service nova] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.193073] env[62816]: DEBUG oslo_concurrency.lockutils [req-928bbb3c-c8c5-47dd-9151-f5806ebc3523 req-8765c239-6d70-4cbb-a87b-6430eb71aa7b service nova] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.193361] env[62816]: DEBUG nova.compute.manager [req-928bbb3c-c8c5-47dd-9151-f5806ebc3523 req-8765c239-6d70-4cbb-a87b-6430eb71aa7b service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] No waiting events found dispatching 
network-vif-plugged-465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2113.193593] env[62816]: WARNING nova.compute.manager [req-928bbb3c-c8c5-47dd-9151-f5806ebc3523 req-8765c239-6d70-4cbb-a87b-6430eb71aa7b service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Received unexpected event network-vif-plugged-465cd9c4-6d8e-4837-8b90-d36e77571bb6 for instance with vm_state building and task_state spawning. [ 2113.266351] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2113.266640] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2113.266781] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2113.340253] env[62816]: DEBUG nova.network.neutron [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Successfully updated port: 465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2113.696066] env[62816]: INFO nova.compute.resource_tracker [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating resource usage from migration 43475881-221d-4c93-9935-e564e18b4eef [ 2113.768571] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84ca9a6-b8a1-4c20-a279-859e65e73aca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.777683] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a286a3-afd6-426b-a07d-9ff57305d18a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.807289] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66cde04f-4613-4f74-8da5-260660b3fd13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.814157] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-de206d55-37bf-48b3-bddd-8bd7d99a56d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.826746] env[62816]: DEBUG nova.compute.provider_tree [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.842343] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2113.842469] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2113.842616] env[62816]: DEBUG nova.network.neutron [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2114.301675] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2114.301867] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2114.302066] env[62816]: DEBUG nova.network.neutron [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2114.329273] env[62816]: DEBUG nova.scheduler.client.report [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2114.375878] 
env[62816]: DEBUG nova.network.neutron [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2114.546130] env[62816]: DEBUG nova.network.neutron [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updating instance_info_cache with network_info: [{"id": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "address": "fa:16:3e:2a:d8:38", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465cd9c4-6d", "ovs_interfaceid": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2114.835064] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.159s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.835444] env[62816]: INFO nova.compute.manager [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Migrating [ 2115.003377] env[62816]: DEBUG nova.network.neutron [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [{"id": "610e9246-a2bd-4611-a6cb-be369b3e41df", "address": "fa:16:3e:94:f3:a3", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap610e9246-a2", "ovs_interfaceid": "610e9246-a2bd-4611-a6cb-be369b3e41df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2115.049302] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.049573] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Instance network_info: |[{"id": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "address": "fa:16:3e:2a:d8:38", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465cd9c4-6d", "ovs_interfaceid": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2115.049989] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:d8:38', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '465cd9c4-6d8e-4837-8b90-d36e77571bb6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2115.057699] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating folder: Project (fc3b1be6e60f4c55be156abede3ea8ce). Parent ref: group-v370905. 
{{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2115.058088] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-164ff19a-1f8c-4662-9ce5-d99d34597142 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.069129] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created folder: Project (fc3b1be6e60f4c55be156abede3ea8ce) in parent group-v370905. [ 2115.069315] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating folder: Instances. Parent ref: group-v371228. {{(pid=62816) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2115.069537] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f826f35-0e46-42e4-b50f-048fead2fc0a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.077680] env[62816]: INFO nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created folder: Instances in parent group-v371228. [ 2115.078559] env[62816]: DEBUG oslo.service.loopingcall [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2115.078559] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2115.078559] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f827451-29e8-41c2-b44f-3f915e4505e3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.096558] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2115.096558] env[62816]: value = "task-1789481" [ 2115.096558] env[62816]: _type = "Task" [ 2115.096558] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.103601] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789481, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.216254] env[62816]: DEBUG nova.compute.manager [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Received event network-changed-465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2115.216453] env[62816]: DEBUG nova.compute.manager [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Refreshing instance network info cache due to event network-changed-465cd9c4-6d8e-4837-8b90-d36e77571bb6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2115.216674] env[62816]: DEBUG oslo_concurrency.lockutils [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] Acquiring lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.216823] env[62816]: DEBUG oslo_concurrency.lockutils [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] Acquired lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.216989] env[62816]: DEBUG nova.network.neutron [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Refreshing network info cache for port 465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2115.352309] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.352510] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.352675] env[62816]: DEBUG nova.network.neutron [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2115.506107] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2115.606012] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789481, 'name': CreateVM_Task, 'duration_secs': 0.318329} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.606179] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2115.606888] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2115.607090] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2115.607410] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2115.607668] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-692f3817-78bc-47df-96d2-73c1169852b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.612159] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2115.612159] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52694cc5-fdd5-48c3-a6d8-fd61654268df" [ 2115.612159] env[62816]: _type = "Task" [ 2115.612159] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.620245] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52694cc5-fdd5-48c3-a6d8-fd61654268df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.905091] env[62816]: DEBUG nova.network.neutron [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updated VIF entry in instance network info cache for port 465cd9c4-6d8e-4837-8b90-d36e77571bb6. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2115.905501] env[62816]: DEBUG nova.network.neutron [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updating instance_info_cache with network_info: [{"id": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "address": "fa:16:3e:2a:d8:38", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465cd9c4-6d", "ovs_interfaceid": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.033028] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-311fda99-e5cd-446b-aaff-b8b086f22349 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.053220] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7009d8a8-6699-42ba-984a-6a3cfc459cef {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.060121] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2116.087058] env[62816]: DEBUG nova.network.neutron [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [{"id": "9660e770-0412-4b45-a580-9b940740fcd1", "address": "fa:16:3e:bf:d0:c3", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9660e770-04", "ovs_interfaceid": "9660e770-0412-4b45-a580-9b940740fcd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2116.121684] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52694cc5-fdd5-48c3-a6d8-fd61654268df, 'name': SearchDatastore_Task, 'duration_secs': 0.012278} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.122186] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.122524] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2116.122864] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2116.123139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2116.123443] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2116.123778] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a70d27a-e526-4153-8b6e-6b28664ac7af {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.131613] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base 
{{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2116.131910] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2116.132877] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a46a021-d8d3-4c39-9ed6-86722677421c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.137601] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2116.137601] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]523f8ece-3588-1cf1-5a6d-b9c0be62da3d" [ 2116.137601] env[62816]: _type = "Task" [ 2116.137601] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.144585] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523f8ece-3588-1cf1-5a6d-b9c0be62da3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.408239] env[62816]: DEBUG oslo_concurrency.lockutils [req-27abaaeb-a8e5-4f8b-ab98-caa27980f50b req-c5f0b84b-b58c-41c2-8bf5-4933acc0b6f1 service nova] Releasing lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.566666] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2116.567013] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5eb5c4b4-cd6e-4780-94e0-bd1f40ca578d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.574083] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2116.574083] env[62816]: value = "task-1789482" [ 2116.574083] env[62816]: _type = "Task" [ 2116.574083] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.581944] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789482, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2116.589531] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2116.648276] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]523f8ece-3588-1cf1-5a6d-b9c0be62da3d, 'name': SearchDatastore_Task, 'duration_secs': 0.008137} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.649077] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-570356e4-2dda-41d8-bd3d-5bc6d0d3e69f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.654353] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2116.654353] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52275c4e-a6e2-ddca-cd41-fa479691e396" [ 2116.654353] env[62816]: _type = "Task" [ 2116.654353] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2116.661780] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52275c4e-a6e2-ddca-cd41-fa479691e396, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.083958] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789482, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.166717] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52275c4e-a6e2-ddca-cd41-fa479691e396, 'name': SearchDatastore_Task, 'duration_secs': 0.009106} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.166949] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2117.167228] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 30884afd-63d4-4a08-a59a-a9dcb4269dba/30884afd-63d4-4a08-a59a-a9dcb4269dba.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2117.167488] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a44f988d-5197-4c05-851f-1250e623a3c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.174493] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2117.174493] env[62816]: value = "task-1789483" [ 2117.174493] env[62816]: _type = "Task" [ 2117.174493] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.181822] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789483, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.585022] env[62816]: DEBUG oslo_vmware.api [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789482, 'name': PowerOnVM_Task, 'duration_secs': 0.562796} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.585328] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2117.585588] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8bb05e49-6b48-46f8-9a42-205f0a496034 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance '4b2e9f7f-b090-4547-bdd0-d4516fcc7589' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2117.684676] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443261} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.684916] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 30884afd-63d4-4a08-a59a-a9dcb4269dba/30884afd-63d4-4a08-a59a-a9dcb4269dba.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2117.685149] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2117.685414] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1262c172-d65e-4da3-a02e-30d99a914c06 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.691631] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2117.691631] env[62816]: value = "task-1789484" [ 2117.691631] env[62816]: _type = "Task" [ 2117.691631] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.699925] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789484, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.104321] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be5a585-ad0a-458d-ad09-f8c6343d81a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.124843] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 0 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2118.201893] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789484, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068156} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2118.202185] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2118.202942] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5650b3-05c8-4bdf-8132-5e2e0121dd74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.224829] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] 30884afd-63d4-4a08-a59a-a9dcb4269dba/30884afd-63d4-4a08-a59a-a9dcb4269dba.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2118.225051] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84022bbd-673f-4a86-a3f6-e528f8256cd8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.244344] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2118.244344] env[62816]: value = "task-1789485" [ 2118.244344] env[62816]: _type = "Task" [ 2118.244344] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.252046] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789485, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.631197] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2118.631519] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e456c0c3-b6ed-43c8-965b-bd0bf4208e8e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.638657] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2118.638657] env[62816]: value = "task-1789486" [ 2118.638657] env[62816]: _type = "Task" [ 2118.638657] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2118.647731] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789486, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2118.754574] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789485, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.153089] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789486, 'name': PowerOffVM_Task, 'duration_secs': 0.186872} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.153089] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2119.153089] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 17 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2119.255323] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789485, 'name': ReconfigVM_Task, 'duration_secs': 0.872052} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.255771] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Reconfigured VM instance instance-00000072 to attach disk [datastore1] 30884afd-63d4-4a08-a59a-a9dcb4269dba/30884afd-63d4-4a08-a59a-a9dcb4269dba.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2119.256614] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0652a05e-e60c-4990-9cef-7633d79dd545 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.266017] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2119.266017] env[62816]: value = "task-1789487" [ 2119.266017] env[62816]: _type = "Task" [ 2119.266017] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.276761] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789487, 'name': Rename_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.659918] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2119.660219] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2119.660306] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2119.660492] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2119.660639] env[62816]: DEBUG nova.virt.hardware 
[None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2119.660787] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2119.660991] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2119.661167] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2119.661336] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2119.661501] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2119.661678] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2119.666716] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4790b6f8-38a8-4831-a2f3-28f3988dfc34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.682749] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2119.682749] env[62816]: value = "task-1789488" [ 2119.682749] env[62816]: _type = "Task" [ 2119.682749] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.690148] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789488, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.773723] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789487, 'name': Rename_Task, 'duration_secs': 0.28213} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2119.773999] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2119.774257] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0cb6d1b-fe35-4e0c-8c0d-d2845456d1d6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2119.781263] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2119.781263] env[62816]: value = "task-1789489" [ 2119.781263] env[62816]: _type = "Task" [ 2119.781263] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2119.789092] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789489, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2119.910958] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.911298] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.911517] env[62816]: DEBUG nova.compute.manager [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Going to confirm migration 7 {{(pid=62816) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2120.192497] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789488, 'name': ReconfigVM_Task, 'duration_secs': 0.131603} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.192820] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 33 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2120.291519] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789489, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.470873] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.471133] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.471366] env[62816]: DEBUG nova.network.neutron [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2120.471603] env[62816]: DEBUG nova.objects.instance [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'info_cache' on Instance uuid 4b2e9f7f-b090-4547-bdd0-d4516fcc7589 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2120.699537] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2120.699871] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:348}} [ 2120.700138] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2120.700389] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2120.700619] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2120.700861] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2120.701163] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2120.701412] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2120.701641] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2120.701868] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2120.702113] env[62816]: DEBUG nova.virt.hardware [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2120.709689] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Reconfiguring VM instance instance-00000071 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2120.710044] env[62816]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c4de62c-d06f-49eb-808e-d65a9cac9e08 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.729349] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2120.729349] env[62816]: value = "task-1789490" [ 2120.729349] env[62816]: _type = "Task" [ 2120.729349] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.737473] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789490, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.791816] env[62816]: DEBUG oslo_vmware.api [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789489, 'name': PowerOnVM_Task, 'duration_secs': 0.948099} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.792083] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2120.792287] env[62816]: INFO nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Took 8.05 seconds to spawn the instance on the hypervisor. [ 2120.792465] env[62816]: DEBUG nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2120.793227] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b12740ed-fb92-44d6-a874-a9489f5fe7f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.239650] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789490, 'name': ReconfigVM_Task, 'duration_secs': 0.170765} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.239934] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Reconfigured VM instance instance-00000071 to detach disk 2000 {{(pid=62816) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2121.241034] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1f3fac-d9ca-4b1b-926b-b3b541a635bb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.262888] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129/0eea9110-9194-4d75-b9af-ba386d96c129.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2121.263193] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe9e107d-845b-4989-b257-f7b0c13fdb13 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.281228] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2121.281228] env[62816]: value = "task-1789491" [ 2121.281228] env[62816]: _type = "Task" [ 2121.281228] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.288979] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789491, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.310959] env[62816]: INFO nova.compute.manager [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Took 12.76 seconds to build instance. 
[ 2121.742467] env[62816]: DEBUG nova.network.neutron [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [{"id": "610e9246-a2bd-4611-a6cb-be369b3e41df", "address": "fa:16:3e:94:f3:a3", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap610e9246-a2", "ovs_interfaceid": "610e9246-a2bd-4611-a6cb-be369b3e41df", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.791234] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789491, 'name': ReconfigVM_Task, 'duration_secs': 0.284383} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.791526] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129/0eea9110-9194-4d75-b9af-ba386d96c129.vmdk or device None with type thin {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2121.791797] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 50 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2121.812925] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8f585d9-2441-49ce-9343-f4a57b1962a6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.270s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.245428] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-4b2e9f7f-b090-4547-bdd0-d4516fcc7589" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.245802] env[62816]: DEBUG nova.objects.instance [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'migration_context' on Instance uuid 4b2e9f7f-b090-4547-bdd0-d4516fcc7589 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2122.298327] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbe259a-2d26-4477-8e9d-45f1aca5a53f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.317364] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbb6b0c-7b2a-42ab-af1c-90b5c91b78d5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.336120] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 67 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2122.506251] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "61a134d9-e02d-48ca-a800-bcd0a19228ec" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.506482] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.748342] env[62816]: DEBUG nova.objects.base [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Object Instance<4b2e9f7f-b090-4547-bdd0-d4516fcc7589> lazy-loaded attributes: info_cache,migration_context {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2122.749320] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bcbd5b-5f67-404f-87cd-e59607b2a0f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.769134] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b258e0d-515c-4c33-b8cd-849c6b89447a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.775085] env[62816]: DEBUG oslo_vmware.api [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2122.775085] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f1e398-bc6f-1cab-ecce-548b014dfd2c" [ 2122.775085] env[62816]: _type = "Task" [ 2122.775085] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.782769] env[62816]: DEBUG oslo_vmware.api [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f1e398-bc6f-1cab-ecce-548b014dfd2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.874028] env[62816]: DEBUG nova.network.neutron [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Port 9660e770-0412-4b45-a580-9b940740fcd1 binding to destination host cpu-1 is already ACTIVE {{(pid=62816) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2123.008532] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2123.287148] env[62816]: DEBUG oslo_vmware.api [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f1e398-bc6f-1cab-ecce-548b014dfd2c, 'name': SearchDatastore_Task, 'duration_secs': 0.008051} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.287502] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.288932] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.606832] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.878052] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ab163a-6295-4f27-bc3b-4629b306fce7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.894794] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5f4ef34-3434-43b4-ae41-7a5af2f25b3a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.903818] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2123.904100] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2123.904323] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock 
"0eea9110-9194-4d75-b9af-ba386d96c129-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.934294] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd5fc2f-f746-4ad0-a3ca-62d31f291d21 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.942961] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdde8bd-21a3-447b-9878-82bb637bec2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.959668] env[62816]: DEBUG nova.compute.provider_tree [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2124.479065] env[62816]: ERROR nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [req-74a235f6-9bbb-4539-ae45-3c4e08eb761b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-74a235f6-9bbb-4539-ae45-3c4e08eb761b"}]} [ 2124.499777] env[62816]: DEBUG nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2124.513383] env[62816]: DEBUG nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2124.513611] env[62816]: DEBUG nova.compute.provider_tree [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2124.524892] env[62816]: DEBUG nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2124.541221] env[62816]: DEBUG nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2124.613293] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b4328c4-e1e8-47a3-ae37-cb0cf6f74e38 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.620427] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-961f3629-7dcd-4752-9795-5e346da26706 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.650779] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9e2597-4a2b-4852-bdf7-c811259b6273 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.657251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8f2d19f-70ec-4cbd-ad33-95e5ffed27fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.669731] env[62816]: DEBUG nova.compute.provider_tree [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2124.935995] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2124.936212] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.936391] env[62816]: DEBUG nova.network.neutron [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2125.197482] env[62816]: DEBUG nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 172 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2125.197867] env[62816]: DEBUG nova.compute.provider_tree [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 
tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 172 to 173 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2125.197971] env[62816]: DEBUG nova.compute.provider_tree [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2125.679792] env[62816]: DEBUG nova.network.neutron [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [{"id": "9660e770-0412-4b45-a580-9b940740fcd1", "address": "fa:16:3e:bf:d0:c3", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9660e770-04", "ovs_interfaceid": "9660e770-0412-4b45-a580-9b940740fcd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.183418] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.208196] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.920s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.210453] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.604s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2126.211861] env[62816]: INFO nova.compute.claims [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2126.704251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9078dc16-e74b-48e9-a4f3-b1ea16b00a54 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.729750] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7874c04-9d59-4057-8f95-4e27b97c33a7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.737030] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 83 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2126.777102] env[62816]: INFO nova.scheduler.client.report [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocation for migration 6821d8cd-c2a8-4847-abba-8663647930d3 [ 2127.242397] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2127.242700] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-343db5be-4839-492d-8c5a-f705e0094800 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.249810] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2127.249810] env[62816]: value = "task-1789492" [ 2127.249810] env[62816]: _type = "Task" [ 2127.249810] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.259650] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789492, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.282408] env[62816]: DEBUG oslo_concurrency.lockutils [None req-a1705e22-432c-4a68-a34a-48b89bdae6ac tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.371s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2127.311890] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b632ca6-883b-48f8-b13c-20a22e19dbaa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.319719] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870799f1-e0e6-496c-b787-9c2decf729a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.349217] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecd699f-f2a2-4a7c-81e7-d24fbf70a9a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.356406] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecd0b2b-0d85-495a-80f6-000051b9ce2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.370949] env[62816]: DEBUG nova.compute.provider_tree [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2127.762837] env[62816]: DEBUG oslo_vmware.api [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789492, 'name': PowerOnVM_Task, 'duration_secs': 0.376257} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.762837] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2127.763263] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8ec0700f-8836-452d-9638-455b61207e60 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance '0eea9110-9194-4d75-b9af-ba386d96c129' progress to 100 {{(pid=62816) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2127.873928] env[62816]: DEBUG nova.scheduler.client.report [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2128.085353] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.085769] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.086026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.086210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.086383] env[62816]: DEBUG oslo_concurrency.lockutils [None 
req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.088579] env[62816]: INFO nova.compute.manager [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Terminating instance [ 2128.090402] env[62816]: DEBUG nova.compute.manager [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2128.090600] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2128.091448] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e5f05e-4450-40f3-b9a8-de6b31b29c40 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.099225] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2128.099447] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a453d5b-52d7-46c1-ad4d-b8483aa3468e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.105367] env[62816]: DEBUG oslo_vmware.api [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2128.105367] env[62816]: value = "task-1789493" [ 2128.105367] env[62816]: _type = "Task" [ 2128.105367] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.112355] env[62816]: DEBUG oslo_vmware.api [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789493, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.379929] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.169s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.380473] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2128.506335] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "8048544b-8947-4f87-8932-9e53dcbf5712" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.506589] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "8048544b-8947-4f87-8932-9e53dcbf5712" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.615616] env[62816]: DEBUG oslo_vmware.api [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789493, 'name': PowerOffVM_Task, 'duration_secs': 0.271753} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.615883] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2128.616097] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2128.616369] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33df54e3-2219-443e-ac28-e35028921d0b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.697883] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2128.698193] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2128.698415] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] 4b2e9f7f-b090-4547-bdd0-d4516fcc7589 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2128.698647] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96d7e6f0-2bbc-482f-953b-46ea07a1f1c9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.704447] env[62816]: DEBUG oslo_vmware.api [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2128.704447] env[62816]: value = "task-1789495" [ 2128.704447] env[62816]: _type = "Task" [ 2128.704447] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.713649] env[62816]: DEBUG oslo_vmware.api [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789495, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.885396] env[62816]: DEBUG nova.compute.utils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2128.887638] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2128.887822] env[62816]: DEBUG nova.network.neutron [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2128.925070] env[62816]: DEBUG nova.policy [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2129.009228] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2129.173690] env[62816]: DEBUG nova.network.neutron [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Successfully created port: 10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2129.214604] env[62816]: DEBUG oslo_vmware.api [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143932} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.214869] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2129.215571] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2129.215768] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2129.215953] env[62816]: INFO nova.compute.manager [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2129.216216] env[62816]: DEBUG oslo.service.loopingcall [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2129.216439] env[62816]: DEBUG nova.compute.manager [-] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2129.216570] env[62816]: DEBUG nova.network.neutron [-] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2129.391103] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2129.532762] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2129.533031] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2129.534521] env[62816]: INFO nova.compute.claims [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2129.554443] env[62816]: DEBUG nova.compute.manager [req-84dfc3b3-d98c-4267-97b6-5cb90d241693 req-349baf54-2523-403e-824a-7e5d7d58301d service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Received event network-vif-deleted-610e9246-a2bd-4611-a6cb-be369b3e41df {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2129.554665] env[62816]: INFO nova.compute.manager [req-84dfc3b3-d98c-4267-97b6-5cb90d241693 req-349baf54-2523-403e-824a-7e5d7d58301d service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Neutron deleted interface 610e9246-a2bd-4611-a6cb-be369b3e41df; detaching it from the instance and deleting it from the info cache [ 2129.554807] env[62816]: DEBUG nova.network.neutron [req-84dfc3b3-d98c-4267-97b6-5cb90d241693 req-349baf54-2523-403e-824a-7e5d7d58301d service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.032288] env[62816]: DEBUG nova.network.neutron [-] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.040268] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.040474] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.040513] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2130.040627] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) 
_heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2130.058795] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-db4f3254-c8b5-4a54-97ee-9e0bb4ce177b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.068909] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e144d4-3e35-4858-9deb-300db5ae3270 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.094895] env[62816]: DEBUG nova.compute.manager [req-84dfc3b3-d98c-4267-97b6-5cb90d241693 req-349baf54-2523-403e-824a-7e5d7d58301d service nova] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Detach interface failed, port_id=610e9246-a2bd-4611-a6cb-be369b3e41df, reason: Instance 4b2e9f7f-b090-4547-bdd0-d4516fcc7589 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2130.400552] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2130.425492] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2130.425759] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2130.425922] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2130.426129] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2130.426276] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image 
pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2130.426423] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2130.426633] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2130.426793] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2130.426960] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2130.427140] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2130.427318] env[62816]: DEBUG nova.virt.hardware [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2130.428219] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494f6731-6105-4de1-b840-af24098a4d5c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.436075] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392a8be5-4f9c-4d79-a33f-e523baf14a25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.535557] env[62816]: INFO nova.compute.manager [-] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Took 1.32 seconds to deallocate network for instance. [ 2130.546123] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 4b2e9f7f-b090-4547-bdd0-d4516fcc7589] Skipping network cache update for instance because it is being deleted. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 2130.546302] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Skipping network cache update for instance because it is being deleted. 
{{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9946}} [ 2130.546405] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Skipping network cache update for instance because it is Building. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2130.546548] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Skipping network cache update for instance because it is Building. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2130.562848] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "0eea9110-9194-4d75-b9af-ba386d96c129" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.563150] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.563358] env[62816]: DEBUG nova.compute.manager [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Going to confirm migration 8 {{(pid=62816) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 2130.566311] env[62816]: DEBUG nova.compute.manager [req-4c41e9da-d4a5-4d9f-b3c0-364d9cb190cc req-e8452603-046c-4d8f-b3b9-a45c888d0a22 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Received event network-vif-plugged-10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2130.566311] env[62816]: DEBUG oslo_concurrency.lockutils [req-4c41e9da-d4a5-4d9f-b3c0-364d9cb190cc req-e8452603-046c-4d8f-b3b9-a45c888d0a22 service nova] Acquiring lock "61a134d9-e02d-48ca-a800-bcd0a19228ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2130.566393] env[62816]: DEBUG oslo_concurrency.lockutils [req-4c41e9da-d4a5-4d9f-b3c0-364d9cb190cc req-e8452603-046c-4d8f-b3b9-a45c888d0a22 service nova] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2130.566540] env[62816]: DEBUG oslo_concurrency.lockutils [req-4c41e9da-d4a5-4d9f-b3c0-364d9cb190cc req-e8452603-046c-4d8f-b3b9-a45c888d0a22 service nova] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
2130.566862] env[62816]: DEBUG nova.compute.manager [req-4c41e9da-d4a5-4d9f-b3c0-364d9cb190cc req-e8452603-046c-4d8f-b3b9-a45c888d0a22 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] No waiting events found dispatching network-vif-plugged-10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2130.567069] env[62816]: WARNING nova.compute.manager [req-4c41e9da-d4a5-4d9f-b3c0-364d9cb190cc req-e8452603-046c-4d8f-b3b9-a45c888d0a22 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Received unexpected event network-vif-plugged-10c56c5d-1763-4ce9-a994-84fe8819b463 for instance with vm_state building and task_state spawning. [ 2130.574894] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2130.575026] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2130.575176] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2130.575322] env[62816]: DEBUG nova.objects.instance [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lazy-loading 'info_cache' on Instance uuid 30884afd-63d4-4a08-a59a-a9dcb4269dba {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2130.632558] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3de379f-1d75-4f21-9839-732ad0ddf7ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.639992] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de98a7f-bfc4-4a77-b6ce-42482642a582 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.671372] env[62816]: DEBUG nova.network.neutron [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Successfully updated port: 10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2130.673705] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34df8303-d206-4b0d-b65b-eb29974a05c8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.682193] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcb1700-0e62-4ce2-b638-766fade5f571 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.697204] env[62816]: DEBUG nova.compute.provider_tree [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has 
not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2131.042021] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.127038] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2131.127241] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquired lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2131.127420] env[62816]: DEBUG nova.network.neutron [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2131.127646] env[62816]: DEBUG nova.objects.instance [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'info_cache' on Instance uuid 0eea9110-9194-4d75-b9af-ba386d96c129 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2131.173884] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-61a134d9-e02d-48ca-a800-bcd0a19228ec" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2131.174049] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-61a134d9-e02d-48ca-a800-bcd0a19228ec" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2131.174202] env[62816]: DEBUG nova.network.neutron [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2131.199469] env[62816]: DEBUG nova.scheduler.client.report [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2131.702846] env[62816]: DEBUG nova.network.neutron [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2131.705366] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.172s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.705886] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2131.708297] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.666s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.708500] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.728958] env[62816]: INFO nova.scheduler.client.report [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocations for instance 4b2e9f7f-b090-4547-bdd0-d4516fcc7589 [ 2131.840505] env[62816]: DEBUG nova.network.neutron [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Updating instance_info_cache with network_info: [{"id": "10c56c5d-1763-4ce9-a994-84fe8819b463", "address": "fa:16:3e:3f:c9:4a", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10c56c5d-17", "ovs_interfaceid": "10c56c5d-1763-4ce9-a994-84fe8819b463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.212723] env[62816]: DEBUG nova.compute.utils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2132.216471] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2132.216666] env[62816]: DEBUG nova.network.neutron [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2132.236378] env[62816]: DEBUG oslo_concurrency.lockutils [None req-738595c7-97bd-40f1-895e-6f9433683e49 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "4b2e9f7f-b090-4547-bdd0-d4516fcc7589" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.151s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.281375] env[62816]: DEBUG nova.policy [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3f53618eedbd4be28d440e1cbd81a8fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '53b24724dc3344f0b4206a015e34f2e4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2132.340658] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updating instance_info_cache with network_info: [{"id": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "address": "fa:16:3e:2a:d8:38", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465cd9c4-6d", "ovs_interfaceid": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.342996] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-61a134d9-e02d-48ca-a800-bcd0a19228ec" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2132.343313] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Instance network_info: |[{"id": "10c56c5d-1763-4ce9-a994-84fe8819b463", "address": "fa:16:3e:3f:c9:4a", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10c56c5d-17", "ovs_interfaceid": "10c56c5d-1763-4ce9-a994-84fe8819b463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2132.343816] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:c9:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '10c56c5d-1763-4ce9-a994-84fe8819b463', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2132.351751] env[62816]: DEBUG oslo.service.loopingcall [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2132.352728] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2132.353074] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-814cba0a-a4b8-4ffe-ac7c-650e9fdb7a18 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.368524] env[62816]: DEBUG nova.network.neutron [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [{"id": "9660e770-0412-4b45-a580-9b940740fcd1", "address": "fa:16:3e:bf:d0:c3", "network": {"id": "fdc0ee12-57b6-4f0e-b1c9-a95cdb8b658d", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-2067141799-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47e070d1729247ff83b4ff6997b45385", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "15ff34f9-4b02-4be1-b433-3ec4bd1b37c2", "external-id": "nsx-vlan-transportzone-51", "segmentation_id": 51, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9660e770-04", "ovs_interfaceid": "9660e770-0412-4b45-a580-9b940740fcd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2132.376707] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2132.376707] env[62816]: value = "task-1789496" [ 2132.376707] env[62816]: _type = "Task" [ 2132.376707] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.387380] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789496, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2132.552471] env[62816]: DEBUG nova.network.neutron [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Successfully created port: f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2132.597250] env[62816]: DEBUG nova.compute.manager [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Received event network-changed-10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2132.597472] env[62816]: DEBUG nova.compute.manager [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Refreshing instance network info cache due to event network-changed-10c56c5d-1763-4ce9-a994-84fe8819b463. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2132.597689] env[62816]: DEBUG oslo_concurrency.lockutils [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] Acquiring lock "refresh_cache-61a134d9-e02d-48ca-a800-bcd0a19228ec" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.597926] env[62816]: DEBUG oslo_concurrency.lockutils [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] Acquired lock "refresh_cache-61a134d9-e02d-48ca-a800-bcd0a19228ec" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.598347] env[62816]: DEBUG nova.network.neutron [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Refreshing network info cache for port 10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2132.722751] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2132.843371] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2132.843693] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2132.843929] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.844137] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.844320] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.844504] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.844680] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.844891] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.845062] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2132.845258] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2132.870467] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Releasing lock "refresh_cache-0eea9110-9194-4d75-b9af-ba386d96c129" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2132.870719] env[62816]: DEBUG nova.objects.instance [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lazy-loading 'migration_context' on Instance uuid 0eea9110-9194-4d75-b9af-ba386d96c129 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2132.886429] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789496, 'name': CreateVM_Task, 'duration_secs': 0.318373} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2132.886538] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2132.887161] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.887329] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.887635] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2132.887871] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a825faf5-58b4-4eb0-aadd-f007ab3cbb56 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.892536] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2132.892536] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52df9aa6-ec78-399f-c1c8-a224ab0bd5be" [ 2132.892536] env[62816]: _type = "Task" [ 2132.892536] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2132.900042] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52df9aa6-ec78-399f-c1c8-a224ab0bd5be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.327610] env[62816]: DEBUG nova.network.neutron [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Updated VIF entry in instance network info cache for port 10c56c5d-1763-4ce9-a994-84fe8819b463. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2133.327993] env[62816]: DEBUG nova.network.neutron [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Updating instance_info_cache with network_info: [{"id": "10c56c5d-1763-4ce9-a994-84fe8819b463", "address": "fa:16:3e:3f:c9:4a", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap10c56c5d-17", "ovs_interfaceid": "10c56c5d-1763-4ce9-a994-84fe8819b463", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.348923] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.349210] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.349419] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.349602] env[62816]: DEBUG nova.compute.resource_tracker [None 
req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2133.350679] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271e9399-f566-4481-94b7-4649e313df72 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.358936] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d906ae-a30e-4a2f-b495-0391f0ecc22f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.372915] env[62816]: DEBUG nova.objects.base [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Object Instance<0eea9110-9194-4d75-b9af-ba386d96c129> lazy-loaded attributes: info_cache,migration_context {{(pid=62816) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2133.373753] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c353a9c5-a6dc-4c99-8709-119dbf4b69dc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.376424] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d5b5cce-7391-4d1f-a88b-257563ce67b2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.397490] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81119491-10b8-43a4-bb82-05d4b7ca024d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.403091] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5aeddfd4-7377-4cf5-9319-202126c891ff {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.409431] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2133.409431] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525cb077-96a3-00b9-a490-e5c2ea0b3a0b" [ 2133.409431] env[62816]: _type = "Task" [ 2133.409431] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.435563] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181190MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2133.435718] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.435894] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2133.437472] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52df9aa6-ec78-399f-c1c8-a224ab0bd5be, 'name': SearchDatastore_Task, 'duration_secs': 0.010153} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.440582] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.440814] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2133.441053] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2133.441205] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2133.441387] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2133.441629] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e92fc59d-0865-48a7-8ed4-84558454925e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.449016] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525cb077-96a3-00b9-a490-e5c2ea0b3a0b, 'name': SearchDatastore_Task, 'duration_secs': 0.006951} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.450113] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2133.450380] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2133.450552] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2133.451223] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00ac7166-a905-4044-ab02-3291d41796fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.455955] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2133.455955] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52434148-2abb-c749-d26e-0a6189496249" [ 2133.455955] env[62816]: _type = "Task" [ 2133.455955] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.463883] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52434148-2abb-c749-d26e-0a6189496249, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.730138] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2133.758443] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2133.758811] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2133.758980] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2133.759183] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2133.759335] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2133.759487] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2133.759699] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2133.759858] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2133.760038] 
env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2133.760209] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2133.760384] env[62816]: DEBUG nova.virt.hardware [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2133.761245] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8122d6f9-9c77-4e35-a92e-54148e696dd4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.769251] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2343dbaf-66a1-4bf8-977f-a832b460368e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.830968] env[62816]: DEBUG oslo_concurrency.lockutils [req-54ce5972-b616-41e7-b2c2-446b38d14335 req-3b6d1153-575a-4a38-85a8-af02cd825387 service nova] Releasing lock "refresh_cache-61a134d9-e02d-48ca-a800-bcd0a19228ec" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2133.966616] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52434148-2abb-c749-d26e-0a6189496249, 'name': SearchDatastore_Task, 'duration_secs': 0.008441} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.968016] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b59d9331-39cf-4b6f-9786-a86d47c78619 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.973421] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2133.973421] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]528485d6-a46a-22ba-923f-ae23eeb53e08" [ 2133.973421] env[62816]: _type = "Task" [ 2133.973421] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.982341] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528485d6-a46a-22ba-923f-ae23eeb53e08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.027657] env[62816]: DEBUG nova.network.neutron [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Successfully updated port: f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2134.443618] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Applying migration context for instance 0eea9110-9194-4d75-b9af-ba386d96c129 as it has an incoming, in-progress migration 43475881-221d-4c93-9935-e564e18b4eef. Migration status is finished {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2134.444471] env[62816]: INFO nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating resource usage from migration 43475881-221d-4c93-9935-e564e18b4eef [ 2134.462711] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 30884afd-63d4-4a08-a59a-a9dcb4269dba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2134.462876] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Migration 43475881-221d-4c93-9935-e564e18b4eef is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2134.462999] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 0eea9110-9194-4d75-b9af-ba386d96c129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2134.463131] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 61a134d9-e02d-48ca-a800-bcd0a19228ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2134.463245] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 8048544b-8947-4f87-8932-9e53dcbf5712 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2134.463425] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2134.463552] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2134.485412] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]528485d6-a46a-22ba-923f-ae23eeb53e08, 'name': SearchDatastore_Task, 'duration_secs': 0.009395} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.485691] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.485944] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 61a134d9-e02d-48ca-a800-bcd0a19228ec/61a134d9-e02d-48ca-a800-bcd0a19228ec.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2134.486226] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5149402b-50cf-437e-bf0d-9db8f51f182e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.492281] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2134.492281] env[62816]: value = "task-1789497" [ 2134.492281] env[62816]: _type = "Task" [ 2134.492281] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.499932] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789497, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.529798] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "refresh_cache-8048544b-8947-4f87-8932-9e53dcbf5712" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2134.529985] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "refresh_cache-8048544b-8947-4f87-8932-9e53dcbf5712" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2134.530167] env[62816]: DEBUG nova.network.neutron [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2134.533955] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d636514b-070c-4acf-b13b-2dead5175ec3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.541356] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c546efa-f820-4084-92b5-f66b7e35a4d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.572066] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334b4233-f961-41b4-91cd-f91d463cfcb9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.579388] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bf0615-a385-4fc3-91cf-d27c83184402 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.592761] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2134.623719] env[62816]: DEBUG nova.compute.manager [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Received event network-vif-plugged-f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2134.623969] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] Acquiring lock "8048544b-8947-4f87-8932-9e53dcbf5712-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.624201] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] Lock 
"8048544b-8947-4f87-8932-9e53dcbf5712-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.624377] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] Lock "8048544b-8947-4f87-8932-9e53dcbf5712-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.624553] env[62816]: DEBUG nova.compute.manager [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] No waiting events found dispatching network-vif-plugged-f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2134.624721] env[62816]: WARNING nova.compute.manager [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Received unexpected event network-vif-plugged-f7710195-3108-4fc4-886c-66b592845487 for instance with vm_state building and task_state spawning. [ 2134.624932] env[62816]: DEBUG nova.compute.manager [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Received event network-changed-f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2134.625123] env[62816]: DEBUG nova.compute.manager [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Refreshing instance network info cache due to event network-changed-f7710195-3108-4fc4-886c-66b592845487. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2134.625297] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] Acquiring lock "refresh_cache-8048544b-8947-4f87-8932-9e53dcbf5712" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2135.002519] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.443002} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.002786] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 61a134d9-e02d-48ca-a800-bcd0a19228ec/61a134d9-e02d-48ca-a800-bcd0a19228ec.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2135.002996] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2135.003270] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fe2a03b-f949-4a5b-932e-02e84920ecbb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.009616] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2135.009616] env[62816]: value = "task-1789498" [ 2135.009616] env[62816]: _type = "Task" [ 2135.009616] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.017247] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789498, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.062028] env[62816]: DEBUG nova.network.neutron [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2135.095500] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2135.185195] env[62816]: DEBUG nova.network.neutron [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Updating instance_info_cache with network_info: [{"id": "f7710195-3108-4fc4-886c-66b592845487", "address": "fa:16:3e:76:c6:be", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7710195-31", "ovs_interfaceid": "f7710195-3108-4fc4-886c-66b592845487", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2135.519838] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789498, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06212} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.520143] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2135.520867] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a1edb6-5a5c-4d60-b161-8bbdc41f195b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.542011] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 61a134d9-e02d-48ca-a800-bcd0a19228ec/61a134d9-e02d-48ca-a800-bcd0a19228ec.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2135.542245] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef8280be-2947-4b2a-a49a-d103c5406f79 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.560318] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2135.560318] env[62816]: value = "task-1789499" [ 2135.560318] env[62816]: _type = "Task" [ 2135.560318] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.567583] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789499, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.601894] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2135.602102] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.166s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2135.602356] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.152s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2135.688249] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "refresh_cache-8048544b-8947-4f87-8932-9e53dcbf5712" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2135.688524] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Instance network_info: |[{"id": "f7710195-3108-4fc4-886c-66b592845487", "address": "fa:16:3e:76:c6:be", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7710195-31", "ovs_interfaceid": "f7710195-3108-4fc4-886c-66b592845487", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2135.688833] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] Acquired lock "refresh_cache-8048544b-8947-4f87-8932-9e53dcbf5712" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2135.689023] env[62816]: DEBUG nova.network.neutron 
[req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Refreshing network info cache for port f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2135.690230] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:c6:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a1931669-8959-4e86-a603-e206bcf2b47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7710195-3108-4fc4-886c-66b592845487', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2135.697595] env[62816]: DEBUG oslo.service.loopingcall [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2135.698471] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2135.698697] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2c7e3bd-ee67-479f-8b90-f283d47b3817 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.718531] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2135.718531] env[62816]: value = "task-1789500" [ 2135.718531] env[62816]: _type = "Task" [ 2135.718531] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.726346] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789500, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.073763] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789499, 'name': ReconfigVM_Task, 'duration_secs': 0.268476} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.074213] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 61a134d9-e02d-48ca-a800-bcd0a19228ec/61a134d9-e02d-48ca-a800-bcd0a19228ec.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2136.075090] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7de14b7-be13-4d69-8b40-f469fa5eace6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.082397] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2136.082397] env[62816]: value = "task-1789501" [ 2136.082397] env[62816]: _type = "Task" [ 2136.082397] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.095521] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789501, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.179553] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fdde937-2fdc-4bc4-a58a-8747a79d400f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.187093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95d9496-3ba8-4220-81a4-d2d0ec9a86df {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.224750] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02d9477-abe1-4808-9eb1-5ceee4cd92bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.232818] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789500, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.235796] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d50b3e-5f48-4b9a-91b1-f1fd3e6600fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.249042] env[62816]: DEBUG nova.compute.provider_tree [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2136.420228] env[62816]: DEBUG nova.network.neutron [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Updated VIF entry in instance network info cache for port f7710195-3108-4fc4-886c-66b592845487. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2136.420603] env[62816]: DEBUG nova.network.neutron [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Updating instance_info_cache with network_info: [{"id": "f7710195-3108-4fc4-886c-66b592845487", "address": "fa:16:3e:76:c6:be", "network": {"id": "e158bb2d-ee0f-4326-828e-96b27aa5c7d6", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1518049674-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "53b24724dc3344f0b4206a015e34f2e4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a1931669-8959-4e86-a603-e206bcf2b47a", "external-id": "nsx-vlan-transportzone-937", "segmentation_id": 937, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7710195-31", "ovs_interfaceid": "f7710195-3108-4fc4-886c-66b592845487", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2136.592344] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789501, 'name': Rename_Task, 'duration_secs': 0.137359} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.592640] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2136.592897] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-457083f7-a6d2-47ae-a665-90cd497327a1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.599143] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2136.599143] env[62816]: value = "task-1789502" [ 2136.599143] env[62816]: _type = "Task" [ 2136.599143] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.606226] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789502, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.730955] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789500, 'name': CreateVM_Task, 'duration_secs': 0.715369} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.731178] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2136.731857] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2136.732068] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2136.732412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2136.732676] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c2b932d-32f1-412c-a79d-4d1d26d69953 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.737116] env[62816]: DEBUG oslo_vmware.api 
[None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2136.737116] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5236ae9b-b6ef-3c64-274e-29b5aa824aca" [ 2136.737116] env[62816]: _type = "Task" [ 2136.737116] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.745875] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5236ae9b-b6ef-3c64-274e-29b5aa824aca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.751741] env[62816]: DEBUG nova.scheduler.client.report [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2136.923774] env[62816]: DEBUG oslo_concurrency.lockutils [req-d6c2d896-3f26-4793-a115-ed93c7a0ce88 req-1eecc7f7-4db3-4628-af75-92aea67887b1 service nova] Releasing lock "refresh_cache-8048544b-8947-4f87-8932-9e53dcbf5712" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.109037] env[62816]: DEBUG oslo_vmware.api [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789502, 'name': PowerOnVM_Task, 'duration_secs': 0.479551} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.109181] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2137.109388] env[62816]: INFO nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Took 6.71 seconds to spawn the instance on the hypervisor. 
[ 2137.109569] env[62816]: DEBUG nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2137.110322] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec65dae-96c2-4771-bc5e-ddb1a62a9ac4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.247059] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5236ae9b-b6ef-3c64-274e-29b5aa824aca, 'name': SearchDatastore_Task, 'duration_secs': 0.009654} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.247372] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2137.247608] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2137.247840] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2137.247985] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2137.248178] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2137.248429] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6541fdd0-37a7-499e-bb87-aaf1a3d9b1cf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.259584] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 
tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2137.259760] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2137.260610] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2b5e7f5-4750-43bf-b97a-3328c81bb51e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.265620] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2137.265620] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52cb2994-3881-63a8-6a25-d9ab75139389" [ 2137.265620] env[62816]: _type = "Task" [ 2137.265620] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.273312] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb2994-3881-63a8-6a25-d9ab75139389, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.625331] env[62816]: INFO nova.compute.manager [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Took 14.11 seconds to build instance. [ 2137.776468] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52cb2994-3881-63a8-6a25-d9ab75139389, 'name': SearchDatastore_Task, 'duration_secs': 0.009049} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.777255] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc0c265c-6ff8-4b04-8a26-e94e247fd8ca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.782553] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2137.782553] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52700e22-5773-c21d-0a73-92e6d20945c8" [ 2137.782553] env[62816]: _type = "Task" [ 2137.782553] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2137.790441] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52700e22-5773-c21d-0a73-92e6d20945c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.293552] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52700e22-5773-c21d-0a73-92e6d20945c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010399} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.293834] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2138.294116] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 8048544b-8947-4f87-8932-9e53dcbf5712/8048544b-8947-4f87-8932-9e53dcbf5712.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2138.294380] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ab8ba3b0-918b-4e08-8a76-3cf49e7a2446 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.301114] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2138.301114] env[62816]: value = "task-1789503" [ 2138.301114] env[62816]: _type = "Task" [ 2138.301114] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2138.309385] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789503, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.810256] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.446009} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2138.810621] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 8048544b-8947-4f87-8932-9e53dcbf5712/8048544b-8947-4f87-8932-9e53dcbf5712.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2138.810697] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2138.810896] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5abe71d-677a-4a52-b59b-33900cf7abcf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.817452] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2138.817452] env[62816]: value = "task-1789504" [ 2138.817452] env[62816]: _type = "Task" [ 2138.817452] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2138.824449] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.015355] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2139.326846] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063034} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.327135] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2139.327907] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf831f2-dade-4c38-8007-358cf3bc8d35 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.349912] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 8048544b-8947-4f87-8932-9e53dcbf5712/8048544b-8947-4f87-8932-9e53dcbf5712.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2139.350154] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-768c7369-4a10-4597-854f-89fa71fd0bb4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.368591] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2139.368591] env[62816]: value = "task-1789505" [ 2139.368591] env[62816]: _type = "Task" [ 2139.368591] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.376789] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789505, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.882547] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789505, 'name': ReconfigVM_Task, 'duration_secs': 0.344939} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.882949] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 8048544b-8947-4f87-8932-9e53dcbf5712/8048544b-8947-4f87-8932-9e53dcbf5712.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2139.883757] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c588667-e8b2-4a6b-86a8-a4a8e73c0c5d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.890902] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2139.890902] env[62816]: value = "task-1789506" [ 2139.890902] env[62816]: _type = "Task" [ 2139.890902] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.899269] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789506, 'name': Rename_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.400362] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789506, 'name': Rename_Task, 'duration_secs': 0.150478} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.400683] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2140.400942] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d34d5576-630a-404f-9ad2-a843fdc6c528 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.406645] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2140.406645] env[62816]: value = "task-1789507" [ 2140.406645] env[62816]: _type = "Task" [ 2140.406645] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.413839] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789507, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.767297] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 5.165s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.917440] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789507, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.132154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8a4ebaf7-70ca-4fe8-8cca-313e10808f3f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.625s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.244019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "61a134d9-e02d-48ca-a800-bcd0a19228ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.244323] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.244540] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "61a134d9-e02d-48ca-a800-bcd0a19228ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2141.244725] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2141.244896] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.247220] env[62816]: INFO nova.compute.manager [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Terminating instance [ 2141.248959] env[62816]: DEBUG nova.compute.manager [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2141.249172] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2141.249984] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76883b6a-ab1c-41d5-983a-bc51709243b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.257460] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2141.257682] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1e8cc1e-b978-4d48-a5b9-940fddb4cd49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.264141] env[62816]: DEBUG oslo_vmware.api [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2141.264141] env[62816]: value = "task-1789508" [ 2141.264141] env[62816]: _type = "Task" [ 2141.264141] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.273194] env[62816]: DEBUG oslo_vmware.api [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789508, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.328841] env[62816]: INFO nova.scheduler.client.report [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocation for migration 43475881-221d-4c93-9935-e564e18b4eef [ 2141.417378] env[62816]: DEBUG oslo_vmware.api [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789507, 'name': PowerOnVM_Task, 'duration_secs': 0.6032} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.417623] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2141.417775] env[62816]: INFO nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Took 7.69 seconds to spawn the instance on the hypervisor. [ 2141.417951] env[62816]: DEBUG nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2141.418765] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5055c281-d372-4ed5-b64b-bf788749f855 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.774744] env[62816]: DEBUG oslo_vmware.api [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789508, 'name': PowerOffVM_Task, 'duration_secs': 0.165572} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.774998] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2141.775157] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2141.775442] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a6d8ce9-693f-4d37-b766-f29dcf56c5cd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.835417] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 11.272s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.855870] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
2141.856122] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2141.856277] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] 61a134d9-e02d-48ca-a800-bcd0a19228ec {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2141.856550] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-644420e9-4611-4fe7-a1d2-4ebde83bb11c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.862920] env[62816]: DEBUG oslo_vmware.api [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2141.862920] env[62816]: value = "task-1789510" [ 2141.862920] env[62816]: _type = "Task" [ 2141.862920] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.870665] env[62816]: DEBUG oslo_vmware.api [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789510, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.935706] env[62816]: INFO nova.compute.manager [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Took 12.42 seconds to build instance. [ 2142.372657] env[62816]: DEBUG oslo_vmware.api [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789510, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160666} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.372929] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2142.373115] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2142.373294] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2142.373468] env[62816]: INFO nova.compute.manager [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2142.373805] env[62816]: DEBUG oslo.service.loopingcall [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2142.373888] env[62816]: DEBUG nova.compute.manager [-] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2142.373980] env[62816]: DEBUG nova.network.neutron [-] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2142.439056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f8ad2546-771b-4abc-b2b8-83ff3c2e71c9 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "8048544b-8947-4f87-8932-9e53dcbf5712" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.932s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.652125] env[62816]: DEBUG nova.compute.manager [req-01795ae6-d5bc-4c94-927a-1b79bb23c4e9 req-2167a71d-8590-4cdf-9db1-34827e05d0f2 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Received event network-vif-deleted-10c56c5d-1763-4ce9-a994-84fe8819b463 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2142.652125] env[62816]: INFO nova.compute.manager [req-01795ae6-d5bc-4c94-927a-1b79bb23c4e9 req-2167a71d-8590-4cdf-9db1-34827e05d0f2 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Neutron deleted interface 10c56c5d-1763-4ce9-a994-84fe8819b463; detaching it from the instance and deleting it from the info cache [ 2142.652125] env[62816]: DEBUG nova.network.neutron [req-01795ae6-d5bc-4c94-927a-1b79bb23c4e9 
req-2167a71d-8590-4cdf-9db1-34827e05d0f2 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.091476] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "0eea9110-9194-4d75-b9af-ba386d96c129" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.091878] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.091878] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.092071] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.092239] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.094794] env[62816]: INFO nova.compute.manager [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Terminating instance [ 2143.096744] env[62816]: DEBUG nova.compute.manager [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2143.096946] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2143.097800] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be726130-8fa3-4c62-a5bc-b0d07ef4691f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.105571] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2143.105571] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5bef3324-5712-43ed-8e64-909d7066396d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.112783] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2143.112783] env[62816]: value = "task-1789511" [ 2143.112783] env[62816]: _type = "Task" [ 2143.112783] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.120315] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.121671] env[62816]: DEBUG nova.network.neutron [-] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2143.155454] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65fb878e-53e5-4b65-bb86-47dd08df98e0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.165270] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cfaa0b-df50-4379-aac9-852fd87fdcd3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.192588] env[62816]: DEBUG nova.compute.manager [req-01795ae6-d5bc-4c94-927a-1b79bb23c4e9 req-2167a71d-8590-4cdf-9db1-34827e05d0f2 service nova] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Detach interface failed, port_id=10c56c5d-1763-4ce9-a994-84fe8819b463, reason: Instance 61a134d9-e02d-48ca-a800-bcd0a19228ec could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2143.201361] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "8048544b-8947-4f87-8932-9e53dcbf5712" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.201671] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "8048544b-8947-4f87-8932-9e53dcbf5712" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.201902] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "8048544b-8947-4f87-8932-9e53dcbf5712-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2143.202101] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "8048544b-8947-4f87-8932-9e53dcbf5712-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.202270] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "8048544b-8947-4f87-8932-9e53dcbf5712-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2143.204556] env[62816]: INFO nova.compute.manager [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Terminating instance [ 2143.206756] env[62816]: DEBUG nova.compute.manager [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2143.206949] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2143.207774] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70430e82-d093-4a1e-9555-69938428988a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.215293] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2143.215543] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-271313fd-5591-4200-9f92-d5d71575bfe0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.222273] env[62816]: DEBUG oslo_vmware.api [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2143.222273] env[62816]: value = "task-1789512" [ 2143.222273] env[62816]: _type = "Task" [ 2143.222273] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.229814] env[62816]: DEBUG oslo_vmware.api [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789512, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.623545] env[62816]: INFO nova.compute.manager [-] [instance: 61a134d9-e02d-48ca-a800-bcd0a19228ec] Took 1.25 seconds to deallocate network for instance. [ 2143.623899] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789511, 'name': PowerOffVM_Task, 'duration_secs': 0.212512} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.625595] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2143.625778] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2143.628456] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70e77259-2135-4d38-bd1e-25812e3aa110 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.699881] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2143.700142] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2143.700305] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleting the datastore file [datastore1] 0eea9110-9194-4d75-b9af-ba386d96c129 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2143.700564] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a0a33dec-b1ec-4076-8619-ac0342c703a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.707660] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for the task: (returnval){ [ 2143.707660] env[62816]: value = "task-1789514" [ 2143.707660] env[62816]: _type = "Task" [ 2143.707660] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.715195] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789514, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.731364] env[62816]: DEBUG oslo_vmware.api [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789512, 'name': PowerOffVM_Task, 'duration_secs': 0.164695} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.731631] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2143.731814] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2143.732079] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc968973-9046-4f03-ba89-99f6f3fd38fa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.799659] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2143.799949] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2143.800202] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleting the datastore file [datastore1] 8048544b-8947-4f87-8932-9e53dcbf5712 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2143.800536] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1596858-3d46-47c1-a2bf-89ec7819e728 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.808218] env[62816]: DEBUG oslo_vmware.api [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for the task: (returnval){ [ 2143.808218] env[62816]: value = "task-1789516" [ 2143.808218] env[62816]: _type = "Task" [ 2143.808218] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.817559] env[62816]: DEBUG oslo_vmware.api [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.131896] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2144.132245] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2144.132472] env[62816]: DEBUG nova.objects.instance [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid 61a134d9-e02d-48ca-a800-bcd0a19228ec {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2144.217916] env[62816]: DEBUG oslo_vmware.api [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Task: {'id': task-1789514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137828} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.218212] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2144.218403] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2144.218586] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2144.218760] env[62816]: INFO nova.compute.manager [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Took 1.12 seconds to destroy the instance on the hypervisor. 
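The records above repeat one pattern for every destroy: the VMware driver invokes a vSphere task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), gets back a task reference ("Waiting for the task ... to complete"), and then oslo.vmware polls it ("progress is 0%", "completed successfully") before moving on. The following is a minimal sketch of that invoke-then-poll pattern using oslo.vmware's public session API, not Nova's actual vmops/vm_util code; the vCenter host, credentials, vm_ref and dc_ref below are placeholders, and real callers obtain them from configuration and prior PropertyCollector lookups.

```python
# Sketch of the invoke-then-poll task pattern visible in the log above.
# Host, credentials, vm_ref and dc_ref are placeholders, not real values.
from oslo_vmware import api

# (host, username, password, api_retry_count, task_poll_interval)
session = api.VMwareAPISession('vcenter.example.org', 'user', 'password', 10, 0.5)


def power_off_and_delete(vm_ref, dc_ref, datastore_path):
    """Power off a VM, unregister it, and delete its datastore files."""
    # PowerOffVM_Task returns a task reference immediately; it does not
    # block until the VM is actually off.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() is what produces the _poll_task lines in the log:
    # it polls the task until success, or raises on error/timeout.
    session.wait_for_task(task)

    # Unregistering is a plain (non-task) call on the VM object.
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # File deletion goes through the FileManager and is again a task,
    # e.g. datastore_path = '[datastore1] 61a134d9-e02d-48ca-a800-...'.
    file_manager = session.vim.service_content.fileManager
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name=datastore_path, datacenter=dc_ref)
    session.wait_for_task(delete_task)
```

Each wait_for_task() call corresponds to one "Task: {'id': task-17895xx, ...} completed successfully" record above; only after the DeleteDatastoreFile_Task completes does the compute manager log "Instance destroyed" and start deallocating the network.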
[ 2144.218995] env[62816]: DEBUG oslo.service.loopingcall [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2144.219207] env[62816]: DEBUG nova.compute.manager [-] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2144.219302] env[62816]: DEBUG nova.network.neutron [-] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2144.318400] env[62816]: DEBUG oslo_vmware.api [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Task: {'id': task-1789516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.135104} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.318670] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2144.318860] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2144.319062] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2144.319241] env[62816]: INFO nova.compute.manager [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2144.319484] env[62816]: DEBUG oslo.service.loopingcall [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2144.319733] env[62816]: DEBUG nova.compute.manager [-] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2144.319861] env[62816]: DEBUG nova.network.neutron [-] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2144.544993] env[62816]: DEBUG nova.compute.manager [req-20020c25-4430-47f4-a749-4d0acc0e1a00 req-f8999b07-b310-4b6d-b7ff-e527a8e221ef service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Received event network-vif-deleted-f7710195-3108-4fc4-886c-66b592845487 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2144.545223] env[62816]: INFO nova.compute.manager [req-20020c25-4430-47f4-a749-4d0acc0e1a00 req-f8999b07-b310-4b6d-b7ff-e527a8e221ef service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Neutron deleted interface f7710195-3108-4fc4-886c-66b592845487; detaching it from the instance and deleting it from the info cache [ 2144.545223] env[62816]: DEBUG nova.network.neutron [req-20020c25-4430-47f4-a749-4d0acc0e1a00 req-f8999b07-b310-4b6d-b7ff-e527a8e221ef service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.677541] env[62816]: DEBUG nova.compute.manager [req-59ee280b-8493-4096-aad6-c0e5ee70ad32 req-d97cf4e1-e637-43f4-bf89-b06ad884b064 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Received event network-vif-deleted-9660e770-0412-4b45-a580-9b940740fcd1 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2144.677844] env[62816]: INFO nova.compute.manager [req-59ee280b-8493-4096-aad6-c0e5ee70ad32 req-d97cf4e1-e637-43f4-bf89-b06ad884b064 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Neutron deleted interface 9660e770-0412-4b45-a580-9b940740fcd1; detaching it from the instance and deleting it from the info cache [ 2144.678116] env[62816]: DEBUG nova.network.neutron [req-59ee280b-8493-4096-aad6-c0e5ee70ad32 req-d97cf4e1-e637-43f4-bf89-b06ad884b064 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.713342] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3625ee7-8a53-4e31-8f95-b2a27552703e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.721685] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0790cc-7164-455f-9cb5-b099bc48690e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.751939] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4612b8f0-0147-4eff-a7a4-151fe35abfc5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.759222] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eb81dd92-6917-45cf-babc-8ad4a44434f3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.772332] env[62816]: DEBUG nova.compute.provider_tree [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2144.915385] env[62816]: DEBUG nova.network.neutron [-] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.023060] env[62816]: DEBUG nova.network.neutron [-] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.048683] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef5a3321-0795-4634-98d0-49a89c778057 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.058154] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4205f5-d661-4449-a59e-b87ec6e6112f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.082624] env[62816]: DEBUG nova.compute.manager [req-20020c25-4430-47f4-a749-4d0acc0e1a00 req-f8999b07-b310-4b6d-b7ff-e527a8e221ef service nova] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Detach interface failed, port_id=f7710195-3108-4fc4-886c-66b592845487, reason: Instance 8048544b-8947-4f87-8932-9e53dcbf5712 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2145.181523] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd70cc70-fa05-4b8c-bc20-2a7dcc676122 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.190437] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1939bf-a3d0-4b97-ae47-3718d7bef33f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.213538] env[62816]: DEBUG nova.compute.manager [req-59ee280b-8493-4096-aad6-c0e5ee70ad32 req-d97cf4e1-e637-43f4-bf89-b06ad884b064 service nova] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Detach interface failed, port_id=9660e770-0412-4b45-a580-9b940740fcd1, reason: Instance 0eea9110-9194-4d75-b9af-ba386d96c129 could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2145.292085] env[62816]: ERROR nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [req-b95a38c8-79fd-4035-b96b-995e576bde43] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b95a38c8-79fd-4035-b96b-995e576bde43"}]} [ 2145.307740] env[62816]: DEBUG nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2145.321330] env[62816]: DEBUG nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2145.321560] env[62816]: DEBUG nova.compute.provider_tree [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2145.333408] env[62816]: DEBUG nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2145.351020] env[62816]: DEBUG nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing trait associations for resource 
provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2145.403989] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e2e942-ae4d-4c64-9868-8b2953a37e6e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.411916] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1efbd7f-0b1c-46b4-ae56-40a9a895f98b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.439979] env[62816]: INFO nova.compute.manager [-] [instance: 0eea9110-9194-4d75-b9af-ba386d96c129] Took 1.22 seconds to deallocate network for instance. [ 2145.442146] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79f1342-afc8-4f76-b576-39e6bbcea24f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.453037] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17903b88-54fc-427c-9e94-209342cfe86d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.466197] env[62816]: DEBUG nova.compute.provider_tree [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2145.525591] env[62816]: INFO nova.compute.manager [-] [instance: 8048544b-8947-4f87-8932-9e53dcbf5712] Took 1.21 seconds to deallocate network for instance. 
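The ERROR at 2145.292085 and the refresh records that follow show Placement's generation-based optimistic concurrency: the inventory PUT carries the provider generation the report client last saw, a concurrent writer bumps it, the PUT returns 409 with code placement.concurrent_update, and the client re-reads inventories, aggregates and traits and retries, after which the generation advances (176 to 177 a few entries later). Below is a rough sketch of that read-modify-retry loop against the Placement HTTP API using plain requests; the endpoint URL and token are placeholders, and Nova's real scheduler report client wraps the same idea in keystoneauth sessions plus a local provider-tree cache rather than raw HTTP calls.

```python
# Sketch of Placement's generation-based optimistic concurrency, as seen in
# the 409 placement.concurrent_update / retry records above.
# PLACEMENT and the token are placeholders; real clients use keystoneauth.
import requests

PLACEMENT = 'http://placement.example.org/placement'
HEADERS = {'OpenStack-API-Version': 'placement 1.26',
           'X-Auth-Token': 'TOKEN'}


def set_inventory(rp_uuid, inventories, max_retries=3):
    """PUT an inventory dict, retrying on provider-generation conflicts."""
    for _ in range(max_retries):
        # Read the provider to learn the generation we must send back.
        rp = requests.get(f'{PLACEMENT}/resource_providers/{rp_uuid}',
                          headers=HEADERS).json()
        payload = {'resource_provider_generation': rp['generation'],
                   'inventories': inventories}
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            json=payload, headers=HEADERS)
        if resp.status_code == 200:
            # Success: Placement bumps the provider generation by one.
            return resp.json()
        if resp.status_code == 409:
            # Someone else updated the provider since we read it
            # (placement.concurrent_update); refresh and try again.
            continue
        resp.raise_for_status()
    raise RuntimeError(f'inventory update still conflicting after '
                       f'{max_retries} retries')
```

With an inventories dict shaped like the one in the log (VCPU, MEMORY_MB and DISK_GB entries with total, reserved, min_unit, max_unit, step_size and allocation_ratio), a successful PUT is exactly the "Updated inventory ... with generation 176" / "Updating resource provider ... generation from 176 to 177" step recorded below.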
[ 2145.949874] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.994906] env[62816]: DEBUG nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 176 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2145.995204] env[62816]: DEBUG nova.compute.provider_tree [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 176 to 177 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2145.995423] env[62816]: DEBUG nova.compute.provider_tree [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2146.031096] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2146.501319] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.369s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.503574] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.554s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.503770] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.505853] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.475s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2146.506091] env[62816]: DEBUG nova.objects.instance [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lazy-loading 'resources' on Instance uuid 8048544b-8947-4f87-8932-9e53dcbf5712 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2146.519399] env[62816]: INFO nova.scheduler.client.report [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance 61a134d9-e02d-48ca-a800-bcd0a19228ec [ 2146.523156] env[62816]: INFO nova.scheduler.client.report [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Deleted allocations for instance 0eea9110-9194-4d75-b9af-ba386d96c129 [ 2147.028882] env[62816]: DEBUG oslo_concurrency.lockutils [None req-08f1cdba-7f59-463f-8594-c3d993c61001 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "61a134d9-e02d-48ca-a800-bcd0a19228ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.784s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.032099] env[62816]: DEBUG oslo_concurrency.lockutils [None req-8f2f528d-40c1-4436-b5ec-5e2384da13e8 tempest-DeleteServersTestJSON-1959749629 tempest-DeleteServersTestJSON-1959749629-project-member] Lock "0eea9110-9194-4d75-b9af-ba386d96c129" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.940s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2147.050592] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f9fc9b-524d-4344-87ed-7dd5f718cf2d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.058498] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14b677b-8557-489f-b204-a110b3671d50 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.088962] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244d6148-7236-415a-af9b-c787efbbd060 {{(pid=62816) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.095993] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a3b9d23-2832-4734-b58a-1654222d9aa1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.110481] env[62816]: DEBUG nova.compute.provider_tree [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2147.613436] env[62816]: DEBUG nova.scheduler.client.report [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2148.118018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.612s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.136118] env[62816]: INFO nova.scheduler.client.report [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Deleted allocations for instance 8048544b-8947-4f87-8932-9e53dcbf5712 [ 2148.324173] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2148.324412] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2148.643789] env[62816]: DEBUG oslo_concurrency.lockutils [None req-db5dd181-2e42-44cf-8fc5-df7d2e0155f0 tempest-ServerDiskConfigTestJSON-1404836711 tempest-ServerDiskConfigTestJSON-1404836711-project-member] Lock "8048544b-8947-4f87-8932-9e53dcbf5712" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.442s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.826970] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2149.348798] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.349139] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2149.350739] env[62816]: INFO nova.compute.claims [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2150.401147] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e867fd17-c780-44ce-87df-d604ae42666f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.409080] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12834fff-8320-4231-8e32-cdad5a56f31c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.439920] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122d7353-8532-4138-b011-40905fced37c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.447311] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb92b386-79fb-46b7-8598-d2ea9a9b7cf2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.460295] env[62816]: DEBUG nova.compute.provider_tree [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2150.964129] env[62816]: DEBUG nova.scheduler.client.report [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2151.472951] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.124s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2151.473783] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2151.979656] env[62816]: DEBUG nova.compute.utils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2151.980766] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2151.980932] env[62816]: DEBUG nova.network.neutron [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2152.037944] env[62816]: DEBUG nova.policy [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2152.459478] env[62816]: DEBUG nova.network.neutron [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Successfully created port: 7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2152.485199] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Start building block device mappings for instance. 
{{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2153.495490] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Start spawning the instance on the hypervisor. {{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2153.525032] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2153.525556] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2153.525696] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2153.525824] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2153.526040] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2153.526259] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2153.526533] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2153.526712] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c 
tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2153.526893] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2153.527076] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2153.527261] env[62816]: DEBUG nova.virt.hardware [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2153.528232] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b3d5fd-9314-4b68-bbf5-c679a63da731 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.536710] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1361bfd1-a4d0-4c45-b592-75d48e87a8a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.925406] env[62816]: DEBUG nova.compute.manager [req-21597e8f-fa71-4000-b913-ab47e9585d49 req-591f7afe-2bfc-48fa-89d7-ee955264f817 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Received event network-vif-plugged-7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2153.925636] env[62816]: DEBUG oslo_concurrency.lockutils [req-21597e8f-fa71-4000-b913-ab47e9585d49 req-591f7afe-2bfc-48fa-89d7-ee955264f817 service nova] Acquiring lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.925697] env[62816]: DEBUG oslo_concurrency.lockutils [req-21597e8f-fa71-4000-b913-ab47e9585d49 req-591f7afe-2bfc-48fa-89d7-ee955264f817 service nova] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2153.925822] env[62816]: DEBUG oslo_concurrency.lockutils [req-21597e8f-fa71-4000-b913-ab47e9585d49 req-591f7afe-2bfc-48fa-89d7-ee955264f817 service nova] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2153.926131] env[62816]: DEBUG nova.compute.manager [req-21597e8f-fa71-4000-b913-ab47e9585d49 req-591f7afe-2bfc-48fa-89d7-ee955264f817 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] No waiting events found dispatching 
network-vif-plugged-7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2153.926326] env[62816]: WARNING nova.compute.manager [req-21597e8f-fa71-4000-b913-ab47e9585d49 req-591f7afe-2bfc-48fa-89d7-ee955264f817 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Received unexpected event network-vif-plugged-7a1882c8-6bed-48d0-9d21-9dcad9a84185 for instance with vm_state building and task_state spawning. [ 2154.395845] env[62816]: DEBUG nova.network.neutron [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Successfully updated port: 7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2154.426971] env[62816]: DEBUG nova.compute.manager [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Received event network-changed-7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2154.427187] env[62816]: DEBUG nova.compute.manager [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Refreshing instance network info cache due to event network-changed-7a1882c8-6bed-48d0-9d21-9dcad9a84185. {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2154.427507] env[62816]: DEBUG oslo_concurrency.lockutils [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] Acquiring lock "refresh_cache-d33caac0-1d1d-4e05-a263-3fd25a1a1535" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.427570] env[62816]: DEBUG oslo_concurrency.lockutils [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] Acquired lock "refresh_cache-d33caac0-1d1d-4e05-a263-3fd25a1a1535" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.427699] env[62816]: DEBUG nova.network.neutron [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Refreshing network info cache for port 7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2154.898852] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-d33caac0-1d1d-4e05-a263-3fd25a1a1535" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.963872] env[62816]: DEBUG nova.network.neutron [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2155.033532] env[62816]: DEBUG nova.network.neutron [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.536135] env[62816]: DEBUG oslo_concurrency.lockutils [req-3c5ec727-280c-4b85-b8cd-557ba3795561 req-b55288a2-3f9b-427a-99e2-e3b611b6c771 service nova] Releasing lock "refresh_cache-d33caac0-1d1d-4e05-a263-3fd25a1a1535" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2155.536528] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-d33caac0-1d1d-4e05-a263-3fd25a1a1535" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2155.536697] env[62816]: DEBUG nova.network.neutron [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2156.066973] env[62816]: DEBUG nova.network.neutron [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2156.190839] env[62816]: DEBUG nova.network.neutron [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Updating instance_info_cache with network_info: [{"id": "7a1882c8-6bed-48d0-9d21-9dcad9a84185", "address": "fa:16:3e:16:fc:0b", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1882c8-6b", "ovs_interfaceid": "7a1882c8-6bed-48d0-9d21-9dcad9a84185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2156.693733] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-d33caac0-1d1d-4e05-a263-3fd25a1a1535" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2156.694057] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Instance network_info: |[{"id": "7a1882c8-6bed-48d0-9d21-9dcad9a84185", "address": "fa:16:3e:16:fc:0b", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7a1882c8-6b", "ovs_interfaceid": "7a1882c8-6bed-48d0-9d21-9dcad9a84185", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2156.694515] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:fc:0b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7a1882c8-6bed-48d0-9d21-9dcad9a84185', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2156.701853] env[62816]: DEBUG oslo.service.loopingcall [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2156.702080] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2156.702305] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc507a0f-e071-447d-82ed-7aecc4422f03 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.722872] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2156.722872] env[62816]: value = "task-1789518" [ 2156.722872] env[62816]: _type = "Task" [ 2156.722872] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2156.729884] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789518, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.233361] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789518, 'name': CreateVM_Task, 'duration_secs': 0.345885} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2157.233675] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2157.234174] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.234345] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2157.234657] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2157.234896] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39ad4355-b373-496f-8afa-cd3ec00cf8b8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.239252] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2157.239252] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]520866c5-8a76-37d1-087a-5368f87fd936" [ 2157.239252] env[62816]: _type = "Task" [ 2157.239252] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.246938] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520866c5-8a76-37d1-087a-5368f87fd936, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.749359] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]520866c5-8a76-37d1-087a-5368f87fd936, 'name': SearchDatastore_Task, 'duration_secs': 0.010377} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2157.749671] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2157.749908] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2157.750150] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.750301] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2157.750483] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2157.750735] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2b2e56cd-5541-49e9-b61a-2f251285cdaa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.759243] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2157.759384] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2157.760052] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4231fb38-8d36-4ead-b892-4c4831c47915 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.764851] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2157.764851] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52149b75-1994-6a5e-10be-8cb0ae979be3" [ 2157.764851] env[62816]: _type = "Task" [ 2157.764851] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.771669] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52149b75-1994-6a5e-10be-8cb0ae979be3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.276021] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52149b75-1994-6a5e-10be-8cb0ae979be3, 'name': SearchDatastore_Task, 'duration_secs': 0.008158} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.276846] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-391ee2ae-f895-4a35-a3e9-9103b4cbbaf3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.281724] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2158.281724] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5299ce99-d892-1870-e50c-3d1d2db13ebd" [ 2158.281724] env[62816]: _type = "Task" [ 2158.281724] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.289127] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5299ce99-d892-1870-e50c-3d1d2db13ebd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.793739] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5299ce99-d892-1870-e50c-3d1d2db13ebd, 'name': SearchDatastore_Task, 'duration_secs': 0.009315} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.794013] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.794307] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d33caac0-1d1d-4e05-a263-3fd25a1a1535/d33caac0-1d1d-4e05-a263-3fd25a1a1535.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2158.794526] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1346426-8d05-49f7-9dbb-503ecbacd585 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.801353] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2158.801353] env[62816]: value = "task-1789519" [ 2158.801353] env[62816]: _type = "Task" [ 2158.801353] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.808996] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789519, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.311605] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789519, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.422961} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.311895] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] d33caac0-1d1d-4e05-a263-3fd25a1a1535/d33caac0-1d1d-4e05-a263-3fd25a1a1535.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2159.312107] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2159.312359] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d93024d7-a1e7-4752-9b37-d4169cd3ba09 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.318534] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2159.318534] env[62816]: value = "task-1789520" [ 2159.318534] env[62816]: _type = "Task" [ 2159.318534] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.325298] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789520, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.828046] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789520, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059792} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.828336] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2159.829093] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae910f10-660c-4ea2-9257-73d2212ee132 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.850022] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] d33caac0-1d1d-4e05-a263-3fd25a1a1535/d33caac0-1d1d-4e05-a263-3fd25a1a1535.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2159.850214] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71309af9-b013-4ec7-a636-101e40725009 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.868652] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2159.868652] env[62816]: value = "task-1789521" [ 2159.868652] env[62816]: _type = "Task" [ 2159.868652] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.875687] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789521, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.379871] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789521, 'name': ReconfigVM_Task, 'duration_secs': 0.279204} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.382936] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Reconfigured VM instance instance-00000075 to attach disk [datastore1] d33caac0-1d1d-4e05-a263-3fd25a1a1535/d33caac0-1d1d-4e05-a263-3fd25a1a1535.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2160.382936] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11375a74-10c7-4fd0-ab11-61b197134dbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.388025] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2160.388025] env[62816]: value = "task-1789522" [ 2160.388025] env[62816]: _type = "Task" [ 2160.388025] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.395110] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789522, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.897629] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789522, 'name': Rename_Task, 'duration_secs': 0.137013} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.897904] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2160.898160] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4e43492-20b5-4367-92ee-74a447dfa637 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.904357] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2160.904357] env[62816]: value = "task-1789523" [ 2160.904357] env[62816]: _type = "Task" [ 2160.904357] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.911248] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789523, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.414455] env[62816]: DEBUG oslo_vmware.api [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789523, 'name': PowerOnVM_Task, 'duration_secs': 0.463423} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.414860] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2161.414914] env[62816]: INFO nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Took 7.92 seconds to spawn the instance on the hypervisor. [ 2161.415093] env[62816]: DEBUG nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2161.415865] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6b4c81-670c-46a9-8646-1fd8fa377469 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.934069] env[62816]: INFO nova.compute.manager [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Took 12.60 seconds to build instance. 
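The build sequence above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeats the same pattern each time: invoke a vCenter task, then poll it until the log reports "completed successfully". The sketch below is a minimal, self-contained illustration of that poll-until-done loop; `TaskInfo`, `SimulatedTask`, and the 0.5 s poll interval are invented for the example and are not the oslo.vmware implementation (in the real driver the equivalent loop is the `wait_for_task` / `_poll_task` code in oslo_vmware/api.py referenced by the entries above).

```python
# Toy sketch of the task-polling pattern visible in the log entries above.
# TaskInfo and SimulatedTask are invented stand-ins for a vCenter task reference.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # 'running', 'success' or 'error'
    progress: int         # 0-100
    error: str | None = None


class SimulatedTask:
    """Stand-in for a vCenter task; advances 25% per poll for the demo."""

    def __init__(self, name: str):
        self.name = name
        self._progress = 0

    def info(self) -> TaskInfo:
        self._progress = min(self._progress + 25, 100)
        state = 'success' if self._progress == 100 else 'running'
        return TaskInfo(state=state, progress=self._progress)


def wait_for_task(task: SimulatedTask, poll_interval: float = 0.5) -> TaskInfo:
    """Poll until the task finishes, mirroring the 'progress is N%' /
    'completed successfully' DEBUG lines above."""
    start = time.monotonic()
    while True:
        info = task.info()
        print(f"Task {task.name} progress is {info.progress}%.")
        if info.state == 'success':
            duration = time.monotonic() - start
            print(f"Task {task.name} completed successfully "
                  f"(waited {duration:.3f}s).")
            return info
        if info.state == 'error':
            raise RuntimeError(info.error or 'task failed')
        time.sleep(poll_interval)


if __name__ == '__main__':
    wait_for_task(SimulatedTask('CreateVM_Task'))
```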
[ 2162.436571] env[62816]: DEBUG oslo_concurrency.lockutils [None req-e6c77105-0035-4829-aeb3-a376448e2d3c tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.112s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.850060] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.850060] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.850060] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.850060] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.850387] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.852443] env[62816]: INFO nova.compute.manager [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Terminating instance [ 2162.854255] env[62816]: DEBUG nova.compute.manager [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2162.854442] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2162.855317] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c834014-df6d-4d8d-af30-93a229ff01cb {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.863736] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2162.863914] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1747c61-efd3-4da4-a8ed-767ad700e39b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.870087] env[62816]: DEBUG oslo_vmware.api [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2162.870087] env[62816]: value = "task-1789524" [ 2162.870087] env[62816]: _type = "Task" [ 2162.870087] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2162.877391] env[62816]: DEBUG oslo_vmware.api [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789524, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.380124] env[62816]: DEBUG oslo_vmware.api [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789524, 'name': PowerOffVM_Task, 'duration_secs': 0.189705} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.380385] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2163.380557] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2163.380808] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7722769b-8bca-4025-b399-7a1ed2c49253 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.450596] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2163.450842] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2163.450991] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] d33caac0-1d1d-4e05-a263-3fd25a1a1535 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2163.451264] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d19a76e3-e160-401f-a1c1-46544d1f0b02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2163.457898] env[62816]: DEBUG oslo_vmware.api [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2163.457898] env[62816]: value = "task-1789526" [ 2163.457898] env[62816]: _type = "Task" [ 2163.457898] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2163.465108] env[62816]: DEBUG oslo_vmware.api [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789526, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2163.967932] env[62816]: DEBUG oslo_vmware.api [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122816} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2163.968199] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2163.968388] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2163.968567] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2163.968738] env[62816]: INFO nova.compute.manager [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2163.968972] env[62816]: DEBUG oslo.service.loopingcall [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2163.969182] env[62816]: DEBUG nova.compute.manager [-] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2163.969278] env[62816]: DEBUG nova.network.neutron [-] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2164.190613] env[62816]: DEBUG nova.compute.manager [req-4fc0d6c5-234e-4590-8656-6084f07902ab req-a6ffcab1-f6d8-40f9-bb59-f2c52df47fcc service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Received event network-vif-deleted-7a1882c8-6bed-48d0-9d21-9dcad9a84185 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2164.190897] env[62816]: INFO nova.compute.manager [req-4fc0d6c5-234e-4590-8656-6084f07902ab req-a6ffcab1-f6d8-40f9-bb59-f2c52df47fcc service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Neutron deleted interface 7a1882c8-6bed-48d0-9d21-9dcad9a84185; detaching it from the instance and deleting it from the info cache [ 2164.191163] env[62816]: DEBUG nova.network.neutron [req-4fc0d6c5-234e-4590-8656-6084f07902ab req-a6ffcab1-f6d8-40f9-bb59-f2c52df47fcc service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2164.670380] env[62816]: DEBUG nova.network.neutron [-] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
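The terminate entries above record a fixed teardown order: power off the VM, unregister it, delete its files from the datastore, then deallocate the Neutron ports. The following is a hedged, self-contained sketch of that ordering only; `FakeHypervisor` and `FakeNetworkAPI` are invented stand-ins, not the nova.compute.manager or vmwareapi driver code.

```python
# Toy sketch of the destroy ordering shown in the preceding log entries:
# power off -> unregister -> delete datastore contents -> deallocate network.
import logging

LOG = logging.getLogger(__name__)


class FakeHypervisor:
    """Stand-in for the vmwareapi driver calls seen above."""

    def power_off(self, instance: str) -> None:
        LOG.debug("Powered off the VM %s", instance)

    def unregister(self, instance: str) -> None:
        LOG.debug("Unregistered the VM %s", instance)

    def delete_datastore_files(self, instance: str) -> None:
        LOG.debug("Deleted contents of %s from the datastore", instance)


class FakeNetworkAPI:
    """Stand-in for the Neutron deallocation call."""

    def deallocate_for_instance(self, instance: str) -> None:
        LOG.debug("Deallocated network for instance %s", instance)


def destroy_instance(instance: str,
                     virt: FakeHypervisor,
                     network: FakeNetworkAPI) -> None:
    """Apply the steps in the same order the log records them:
    guest first, storage next, network last."""
    virt.power_off(instance)
    virt.unregister(instance)
    virt.delete_datastore_files(instance)
    network.deallocate_for_instance(instance)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    destroy_instance("d33caac0-1d1d-4e05-a263-3fd25a1a1535",
                     FakeHypervisor(), FakeNetworkAPI())
```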
[ 2164.694482] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-caae617e-2378-491c-a422-62b5dc55cedd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.703807] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6094d51f-8844-4f0e-aad2-caabc1a8a11f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.727549] env[62816]: DEBUG nova.compute.manager [req-4fc0d6c5-234e-4590-8656-6084f07902ab req-a6ffcab1-f6d8-40f9-bb59-f2c52df47fcc service nova] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Detach interface failed, port_id=7a1882c8-6bed-48d0-9d21-9dcad9a84185, reason: Instance d33caac0-1d1d-4e05-a263-3fd25a1a1535 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2165.173528] env[62816]: INFO nova.compute.manager [-] [instance: d33caac0-1d1d-4e05-a263-3fd25a1a1535] Took 1.20 seconds to deallocate network for instance. [ 2165.679275] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.679990] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.679990] env[62816]: DEBUG nova.objects.instance [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid d33caac0-1d1d-4e05-a263-3fd25a1a1535 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2166.222634] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-087ffd65-4194-48f8-84a8-c2430bacdaab {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.230333] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a433ba-a71c-4c4f-93cf-f2f6648cc015 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.259649] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72fa7b0-c701-455c-872a-0fa670bc3d02 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.266550] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4687c1be-e4fe-40be-b650-694dd01f3f71 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.279020] env[62816]: DEBUG nova.compute.provider_tree [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2166.797293] env[62816]: ERROR nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [req-7d3dbc1f-e085-49a6-ad88-346471e006ae] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7d3dbc1f-e085-49a6-ad88-346471e006ae"}]} [ 2166.813259] env[62816]: DEBUG nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2166.825530] env[62816]: DEBUG nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2166.825757] env[62816]: DEBUG nova.compute.provider_tree [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2166.835237] env[62816]: DEBUG nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2166.851023] env[62816]: DEBUG nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2166.879917] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31151157-bf41-49b4-87c9-9b8b0944aff7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.887522] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17d67b9-9806-42c9-968a-1c8405772eed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.917491] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86d0fef2-711d-4f58-8d57-a8675b585646 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.924189] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41902f9-e8fa-4916-9170-4612b91b857c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.936775] env[62816]: DEBUG nova.compute.provider_tree [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2167.467493] env[62816]: DEBUG nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 178 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2167.467826] env[62816]: DEBUG nova.compute.provider_tree [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 178 to 179 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2167.468054] env[62816]: DEBUG nova.compute.provider_tree [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2167.972752] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.293s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2167.992196] env[62816]: INFO nova.scheduler.client.report [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance d33caac0-1d1d-4e05-a263-3fd25a1a1535 [ 2168.500556] env[62816]: DEBUG oslo_concurrency.lockutils [None req-ee966f4e-7d6e-489d-bbf5-fd549e8d3e9d tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "d33caac0-1d1d-4e05-a263-3fd25a1a1535" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.651s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2169.767906] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "3e412acc-287a-4e66-b4f4-28d74de49d8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2169.768203] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.270426] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2170.790395] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2170.790659] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2170.792175] env[62816]: INFO nova.compute.claims [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2171.837054] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64bc92c4-77c6-46ff-bd83-19b779ac7bb7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.844036] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6871d22-7176-4f4e-bd28-c64565821fe9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.872829] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647a37b9-54c3-4dc4-b701-eed89336ee3b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.879511] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c462fdc-2190-4284-bf05-95fc5d950b5a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.892084] env[62816]: DEBUG nova.compute.provider_tree [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2172.395388] env[62816]: DEBUG nova.scheduler.client.report [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2172.900798] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.901376] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2173.406257] env[62816]: DEBUG nova.compute.utils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2173.407685] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2173.407883] env[62816]: DEBUG nova.network.neutron [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2173.455864] env[62816]: DEBUG nova.policy [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2173.700088] env[62816]: DEBUG nova.network.neutron [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Successfully created port: 90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2173.911772] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2174.921043] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2174.946877] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2174.947147] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2174.947311] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2174.947499] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2174.947647] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2174.947795] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2174.948016] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2174.948351] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2174.948513] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2174.948690] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2174.948869] env[62816]: DEBUG nova.virt.hardware [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2174.949859] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbea60b-9ae5-4542-a1ef-ecf3d0093471 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.957899] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c032147-6095-4a65-aa38-1f6f2462cfd2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.065381] env[62816]: DEBUG nova.compute.manager [req-b160d5c2-8769-43af-8304-bacd1587a216 req-d7a208b8-1543-40a9-a6b2-ac870c920cde service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Received event network-vif-plugged-90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2175.065611] env[62816]: DEBUG oslo_concurrency.lockutils [req-b160d5c2-8769-43af-8304-bacd1587a216 req-d7a208b8-1543-40a9-a6b2-ac870c920cde service nova] Acquiring lock "3e412acc-287a-4e66-b4f4-28d74de49d8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2175.065900] env[62816]: DEBUG oslo_concurrency.lockutils [req-b160d5c2-8769-43af-8304-bacd1587a216 req-d7a208b8-1543-40a9-a6b2-ac870c920cde service nova] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2175.065977] env[62816]: DEBUG oslo_concurrency.lockutils [req-b160d5c2-8769-43af-8304-bacd1587a216 req-d7a208b8-1543-40a9-a6b2-ac870c920cde service nova] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2175.066230] env[62816]: DEBUG nova.compute.manager [req-b160d5c2-8769-43af-8304-bacd1587a216 req-d7a208b8-1543-40a9-a6b2-ac870c920cde service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] No waiting events found dispatching network-vif-plugged-90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2175.066426] env[62816]: WARNING nova.compute.manager [req-b160d5c2-8769-43af-8304-bacd1587a216 req-d7a208b8-1543-40a9-a6b2-ac870c920cde service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] 
Received unexpected event network-vif-plugged-90b50828-9370-443f-83e2-ba2bd00e40c6 for instance with vm_state building and task_state spawning. [ 2175.159542] env[62816]: DEBUG nova.network.neutron [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Successfully updated port: 90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2175.661965] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-3e412acc-287a-4e66-b4f4-28d74de49d8d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2175.662196] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-3e412acc-287a-4e66-b4f4-28d74de49d8d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2175.662362] env[62816]: DEBUG nova.network.neutron [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2176.194725] env[62816]: DEBUG nova.network.neutron [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2176.311774] env[62816]: DEBUG nova.network.neutron [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Updating instance_info_cache with network_info: [{"id": "90b50828-9370-443f-83e2-ba2bd00e40c6", "address": "fa:16:3e:38:71:b5", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b50828-93", "ovs_interfaceid": "90b50828-9370-443f-83e2-ba2bd00e40c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.814391] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-3e412acc-287a-4e66-b4f4-28d74de49d8d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2176.814717] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Instance network_info: |[{"id": "90b50828-9370-443f-83e2-ba2bd00e40c6", "address": "fa:16:3e:38:71:b5", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b50828-93", "ovs_interfaceid": "90b50828-9370-443f-83e2-ba2bd00e40c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2176.815179] env[62816]: 
DEBUG nova.virt.vmwareapi.vmops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:71:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90b50828-9370-443f-83e2-ba2bd00e40c6', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2176.822765] env[62816]: DEBUG oslo.service.loopingcall [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2176.822972] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2176.823211] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-835f6919-9df9-4bad-a242-ab2db6cd7fec {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.843208] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2176.843208] env[62816]: value = "task-1789527" [ 2176.843208] env[62816]: _type = "Task" [ 2176.843208] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.850339] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789527, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.092425] env[62816]: DEBUG nova.compute.manager [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Received event network-changed-90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2177.092532] env[62816]: DEBUG nova.compute.manager [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Refreshing instance network info cache due to event network-changed-90b50828-9370-443f-83e2-ba2bd00e40c6. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2177.092721] env[62816]: DEBUG oslo_concurrency.lockutils [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] Acquiring lock "refresh_cache-3e412acc-287a-4e66-b4f4-28d74de49d8d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.092868] env[62816]: DEBUG oslo_concurrency.lockutils [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] Acquired lock "refresh_cache-3e412acc-287a-4e66-b4f4-28d74de49d8d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.093038] env[62816]: DEBUG nova.network.neutron [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Refreshing network info cache for port 90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2177.354843] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789527, 'name': CreateVM_Task, 'duration_secs': 0.291135} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.355126] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2177.355707] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.355875] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.356207] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2177.356457] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-320310e0-2542-49ec-82bd-8019fbb63841 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.361153] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2177.361153] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52f1bb56-03ab-8363-3f31-fe57511cbbf6" [ 2177.361153] env[62816]: _type = "Task" [ 2177.361153] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.368939] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f1bb56-03ab-8363-3f31-fe57511cbbf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.772148] env[62816]: DEBUG nova.network.neutron [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Updated VIF entry in instance network info cache for port 90b50828-9370-443f-83e2-ba2bd00e40c6. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2177.772512] env[62816]: DEBUG nova.network.neutron [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Updating instance_info_cache with network_info: [{"id": "90b50828-9370-443f-83e2-ba2bd00e40c6", "address": "fa:16:3e:38:71:b5", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90b50828-93", "ovs_interfaceid": "90b50828-9370-443f-83e2-ba2bd00e40c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.871278] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52f1bb56-03ab-8363-3f31-fe57511cbbf6, 'name': SearchDatastore_Task, 'duration_secs': 0.009154} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.871591] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.871822] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2177.872096] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2177.872222] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2177.872403] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2177.872669] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83cfcbb8-fe53-47e5-81d9-77acfcd868c6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.880037] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2177.880215] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2177.880865] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e77caef-d040-40f6-b07f-1d92636ec366 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.885635] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2177.885635] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52a54454-396c-ef1a-9160-add1875cadec" [ 2177.885635] env[62816]: _type = "Task" [ 2177.885635] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.892557] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a54454-396c-ef1a-9160-add1875cadec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.275429] env[62816]: DEBUG oslo_concurrency.lockutils [req-e92671b5-2a91-46d4-971a-50805ca5adfb req-80c5fa03-7e02-4fe8-afc0-9ba7dab05b7c service nova] Releasing lock "refresh_cache-3e412acc-287a-4e66-b4f4-28d74de49d8d" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.396270] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52a54454-396c-ef1a-9160-add1875cadec, 'name': SearchDatastore_Task, 'duration_secs': 0.007375} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.397082] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6de261c7-6e52-4a92-8348-57fb1d4f0dc1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.401918] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2178.401918] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52d4af4e-fa7b-e0db-59b7-ba4579bb75f1" [ 2178.401918] env[62816]: _type = "Task" [ 2178.401918] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.409126] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d4af4e-fa7b-e0db-59b7-ba4579bb75f1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.912038] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52d4af4e-fa7b-e0db-59b7-ba4579bb75f1, 'name': SearchDatastore_Task, 'duration_secs': 0.008831} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.912191] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2178.912334] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3e412acc-287a-4e66-b4f4-28d74de49d8d/3e412acc-287a-4e66-b4f4-28d74de49d8d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2178.912601] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-17de869e-b995-4d47-b799-ee54fa227473 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.920080] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2178.920080] env[62816]: value = "task-1789528" [ 2178.920080] env[62816]: _type = "Task" [ 2178.920080] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.927600] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789528, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.429206] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789528, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.440996} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.429615] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 3e412acc-287a-4e66-b4f4-28d74de49d8d/3e412acc-287a-4e66-b4f4-28d74de49d8d.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2179.429696] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2179.429883] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0c62366-5198-4937-8dbd-989ef5b0a530 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.436042] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2179.436042] env[62816]: value = "task-1789529" [ 2179.436042] env[62816]: _type = "Task" [ 2179.436042] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.442635] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789529, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.945700] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789529, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058448} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.945964] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2179.946838] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea17ff3-fc51-4eec-b1d1-cc96d0d886ed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.968833] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 3e412acc-287a-4e66-b4f4-28d74de49d8d/3e412acc-287a-4e66-b4f4-28d74de49d8d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2179.969124] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f27df9d7-b102-4f6d-a87a-357336350af0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.987932] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2179.987932] env[62816]: value = "task-1789530" [ 2179.987932] env[62816]: _type = "Task" [ 2179.987932] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.995079] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789530, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.499650] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789530, 'name': ReconfigVM_Task, 'duration_secs': 0.254416} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.500093] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 3e412acc-287a-4e66-b4f4-28d74de49d8d/3e412acc-287a-4e66-b4f4-28d74de49d8d.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2180.500520] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9acf9b4-82ec-4614-96e8-5275c84a7678 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.507162] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2180.507162] env[62816]: value = "task-1789531" [ 2180.507162] env[62816]: _type = "Task" [ 2180.507162] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.514323] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789531, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.017045] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789531, 'name': Rename_Task, 'duration_secs': 0.127875} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.017322] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2181.017554] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb6397e0-1f7d-4f87-a66d-f4fa070ae166 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.023107] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2181.023107] env[62816]: value = "task-1789532" [ 2181.023107] env[62816]: _type = "Task" [ 2181.023107] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.030288] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789532, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.532883] env[62816]: DEBUG oslo_vmware.api [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789532, 'name': PowerOnVM_Task, 'duration_secs': 0.436554} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.533278] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2181.533336] env[62816]: INFO nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Took 6.61 seconds to spawn the instance on the hypervisor. [ 2181.533468] env[62816]: DEBUG nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2181.534200] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-528f342b-aa42-4e15-afd0-5f76778ff621 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.051387] env[62816]: INFO nova.compute.manager [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Took 11.28 seconds to build instance. 
[ 2182.554165] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7d7fcd94-e101-4e90-9fb8-061c65fcff13 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.786s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.172588] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "b27c5b09-d7e4-475c-9848-bab1375075b2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.172837] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.675764] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Starting instance... {{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2184.204534] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.204800] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2184.206394] env[62816]: INFO nova.compute.claims [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2184.456212] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2185.330728] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d5bcb8-14bd-4677-9109-a9fb2c1af907 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.338374] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8111e435-7199-404d-a079-abe032f8c109 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.369404] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86c797c-a149-41e6-a221-eaca60f5feed {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.376691] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f6ec597-50d7-40ed-8ca8-0c32a59a4e28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.389443] env[62816]: DEBUG nova.compute.provider_tree [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2185.892412] env[62816]: DEBUG nova.scheduler.client.report [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2186.397842] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.398398] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2186.902917] env[62816]: DEBUG nova.compute.utils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2186.904205] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Allocating IP information in the background. 
{{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2186.904760] env[62816]: DEBUG nova.network.neutron [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2186.952688] env[62816]: DEBUG nova.policy [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2187.188626] env[62816]: DEBUG nova.network.neutron [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Successfully created port: 4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2187.407860] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2188.417982] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2188.444761] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2188.445048] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2188.445209] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2188.445421] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2188.445573] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2188.445720] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2188.445951] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2188.446128] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2188.446343] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2188.446534] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2188.446711] env[62816]: DEBUG nova.virt.hardware [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2188.447574] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635ba31f-d3eb-4561-92c5-c47bacbd5338 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.455408] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b135010c-525a-4e7d-8dec-19583b98deea {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.459382] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2188.459710] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2188.459710] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Rebuilding the list of instances to heal {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2188.552387] env[62816]: DEBUG nova.compute.manager [req-7746dbb8-7a6b-4a86-a55f-50d6ebfbe750 req-3f84cc95-58fb-47ee-b931-7eda37ed6d36 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Received event network-vif-plugged-4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2188.552607] env[62816]: DEBUG oslo_concurrency.lockutils [req-7746dbb8-7a6b-4a86-a55f-50d6ebfbe750 req-3f84cc95-58fb-47ee-b931-7eda37ed6d36 service nova] Acquiring lock "b27c5b09-d7e4-475c-9848-bab1375075b2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.552813] env[62816]: DEBUG oslo_concurrency.lockutils [req-7746dbb8-7a6b-4a86-a55f-50d6ebfbe750 req-3f84cc95-58fb-47ee-b931-7eda37ed6d36 service nova] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.552977] env[62816]: DEBUG oslo_concurrency.lockutils [req-7746dbb8-7a6b-4a86-a55f-50d6ebfbe750 req-3f84cc95-58fb-47ee-b931-7eda37ed6d36 service nova] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2-events" "released" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.553206] env[62816]: DEBUG nova.compute.manager [req-7746dbb8-7a6b-4a86-a55f-50d6ebfbe750 req-3f84cc95-58fb-47ee-b931-7eda37ed6d36 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] No waiting events found dispatching network-vif-plugged-4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2188.553362] env[62816]: WARNING nova.compute.manager [req-7746dbb8-7a6b-4a86-a55f-50d6ebfbe750 req-3f84cc95-58fb-47ee-b931-7eda37ed6d36 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Received unexpected event network-vif-plugged-4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 for instance with vm_state building and task_state spawning. [ 2188.634075] env[62816]: DEBUG nova.network.neutron [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Successfully updated port: 4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2188.963070] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Skipping network cache update for instance because it is Building. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2188.990984] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2188.991157] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquired lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2188.991313] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Forcefully refreshing network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2188.991470] env[62816]: DEBUG nova.objects.instance [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lazy-loading 'info_cache' on Instance uuid 30884afd-63d4-4a08-a59a-a9dcb4269dba {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2189.136663] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-b27c5b09-d7e4-475c-9848-bab1375075b2" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2189.136864] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-b27c5b09-d7e4-475c-9848-bab1375075b2" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2189.137012] env[62816]: DEBUG nova.network.neutron [None 
req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2189.668748] env[62816]: DEBUG nova.network.neutron [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Instance cache missing network info. {{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2189.786494] env[62816]: DEBUG nova.network.neutron [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Updating instance_info_cache with network_info: [{"id": "4a9eddf4-57bf-4bca-a8f6-3bec1edbc941", "address": "fa:16:3e:e0:ba:15", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a9eddf4-57", "ovs_interfaceid": "4a9eddf4-57bf-4bca-a8f6-3bec1edbc941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.288954] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-b27c5b09-d7e4-475c-9848-bab1375075b2" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2190.289261] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Instance network_info: |[{"id": "4a9eddf4-57bf-4bca-a8f6-3bec1edbc941", "address": "fa:16:3e:e0:ba:15", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a9eddf4-57", "ovs_interfaceid": "4a9eddf4-57bf-4bca-a8f6-3bec1edbc941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2190.290043] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:ba:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a9eddf4-57bf-4bca-a8f6-3bec1edbc941', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2190.297438] env[62816]: DEBUG oslo.service.loopingcall [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2190.297631] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2190.297848] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14c9e545-e8c6-421a-8a7a-6e9875778c4e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.317808] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2190.317808] env[62816]: value = "task-1789533" [ 2190.317808] env[62816]: _type = "Task" [ 2190.317808] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.325254] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789533, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.580319] env[62816]: DEBUG nova.compute.manager [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Received event network-changed-4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2190.580517] env[62816]: DEBUG nova.compute.manager [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Refreshing instance network info cache due to event network-changed-4a9eddf4-57bf-4bca-a8f6-3bec1edbc941. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2190.580728] env[62816]: DEBUG oslo_concurrency.lockutils [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] Acquiring lock "refresh_cache-b27c5b09-d7e4-475c-9848-bab1375075b2" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2190.580870] env[62816]: DEBUG oslo_concurrency.lockutils [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] Acquired lock "refresh_cache-b27c5b09-d7e4-475c-9848-bab1375075b2" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2190.581037] env[62816]: DEBUG nova.network.neutron [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Refreshing network info cache for port 4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2190.695888] env[62816]: DEBUG nova.network.neutron [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updating instance_info_cache with network_info: [{"id": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "address": "fa:16:3e:2a:d8:38", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap465cd9c4-6d", "ovs_interfaceid": "465cd9c4-6d8e-4837-8b90-d36e77571bb6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2190.827563] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789533, 'name': CreateVM_Task, 'duration_secs': 0.309587} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.827732] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2190.828434] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2190.828614] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2190.828940] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2190.829211] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a29ab381-7df5-4ce6-90d4-e2f79ae342e1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.833635] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2190.833635] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]522e11d7-da81-1247-b5cf-bf77ee0a8c49" [ 2190.833635] env[62816]: _type = "Task" [ 2190.833635] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.841054] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522e11d7-da81-1247-b5cf-bf77ee0a8c49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.199019] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Releasing lock "refresh_cache-30884afd-63d4-4a08-a59a-a9dcb4269dba" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.199019] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updated the network info_cache for instance {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2191.199019] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.199019] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.199019] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.199019] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2191.199019] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2191.307605] env[62816]: DEBUG nova.network.neutron [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Updated VIF entry in instance network info cache for port 4a9eddf4-57bf-4bca-a8f6-3bec1edbc941. 
{{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2191.307981] env[62816]: DEBUG nova.network.neutron [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Updating instance_info_cache with network_info: [{"id": "4a9eddf4-57bf-4bca-a8f6-3bec1edbc941", "address": "fa:16:3e:e0:ba:15", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a9eddf4-57", "ovs_interfaceid": "4a9eddf4-57bf-4bca-a8f6-3bec1edbc941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2191.343476] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]522e11d7-da81-1247-b5cf-bf77ee0a8c49, 'name': SearchDatastore_Task, 'duration_secs': 0.008577} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.343769] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.343998] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2191.344250] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2191.344403] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2191.344585] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2191.344831] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb7fb195-2def-4543-8933-adc83b2c6cf7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.353348] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2191.353526] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2191.354211] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db663af7-6a21-4e3a-99eb-abcf115a7bc4 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.358716] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2191.358716] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5241a0a3-66f9-1f9f-11e8-b12d8b3cf0e3" [ 2191.358716] env[62816]: _type = "Task" [ 2191.358716] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.365754] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5241a0a3-66f9-1f9f-11e8-b12d8b3cf0e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.702149] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.702635] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.702635] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2191.702635] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2191.703576] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5868a10e-5993-4179-b1b1-d3b3eba0113b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.711835] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a154f6-d019-4f81-b2a2-3fd66b6c993a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.725019] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743640bd-7d41-4c43-8658-84d3aa6b371c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.731048] env[62816]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-455f3929-65f2-4028-a70f-0e12f5eb948b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.759920] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181079MB free_disk=161GB free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2191.760079] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2191.760271] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2191.810869] env[62816]: DEBUG oslo_concurrency.lockutils [req-d23c2a53-0230-4c4a-84c8-ecbaed5f3c1a req-88d84609-8dbc-4a11-b5fc-92e38ff5627c service nova] Releasing lock "refresh_cache-b27c5b09-d7e4-475c-9848-bab1375075b2" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2191.868592] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5241a0a3-66f9-1f9f-11e8-b12d8b3cf0e3, 'name': SearchDatastore_Task, 'duration_secs': 0.00796} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.869349] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e663a4ab-6b60-47c6-82ae-29a43b608bdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.874306] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2191.874306] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52ac89a8-cff0-ceb8-40ce-a511fd0a3901" [ 2191.874306] env[62816]: _type = "Task" [ 2191.874306] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.881593] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ac89a8-cff0-ceb8-40ce-a511fd0a3901, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.384781] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52ac89a8-cff0-ceb8-40ce-a511fd0a3901, 'name': SearchDatastore_Task, 'duration_secs': 0.009122} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.385105] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2192.385403] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b27c5b09-d7e4-475c-9848-bab1375075b2/b27c5b09-d7e4-475c-9848-bab1375075b2.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2192.385663] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc648b94-09cd-4594-b6c8-625c301fddba {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.392943] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2192.392943] env[62816]: value = "task-1789534" [ 2192.392943] env[62816]: _type = "Task" [ 2192.392943] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.400222] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789534, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.786779] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 30884afd-63d4-4a08-a59a-a9dcb4269dba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2192.787153] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 3e412acc-287a-4e66-b4f4-28d74de49d8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2192.787153] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance b27c5b09-d7e4-475c-9848-bab1375075b2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2192.787253] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2192.787393] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2192.835981] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf9db95-7dde-4054-9779-82dc2dd98ff6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.843233] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc18cd7e-1140-41f1-bf84-14cf8005e994 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.871915] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d62cee-ee6e-47fd-8d3f-976d051e2e34 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.878617] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-872c5c0d-d190-45f1-958f-70d7b5c87e28 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.891893] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2192.900447] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789534, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.421752} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.901201] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] b27c5b09-d7e4-475c-9848-bab1375075b2/b27c5b09-d7e4-475c-9848-bab1375075b2.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2192.901419] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2192.901629] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85c9ad5c-c2c1-47dd-91ce-4963e4c92ea2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.907845] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2192.907845] env[62816]: value = "task-1789535" [ 2192.907845] env[62816]: _type = "Task" [ 2192.907845] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2192.914895] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789535, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.395554] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2193.417711] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789535, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070014} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.418038] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2193.418836] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9ab7fa-d927-4f1f-a719-7e3a4bb5ed25 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.441142] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] b27c5b09-d7e4-475c-9848-bab1375075b2/b27c5b09-d7e4-475c-9848-bab1375075b2.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2193.441459] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51dd9581-684f-431a-bb36-94f8793ebddd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.460917] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2193.460917] env[62816]: value = "task-1789536" [ 2193.460917] env[62816]: _type = "Task" [ 2193.460917] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.468446] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789536, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.900077] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2193.900609] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.140s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.971108] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789536, 'name': ReconfigVM_Task, 'duration_secs': 0.266667} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2193.971108] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Reconfigured VM instance instance-00000077 to attach disk [datastore1] b27c5b09-d7e4-475c-9848-bab1375075b2/b27c5b09-d7e4-475c-9848-bab1375075b2.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2193.971756] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4d9bb91-a0e9-4fc3-8718-9b4671ac3973 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.978130] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2193.978130] env[62816]: value = "task-1789537" [ 2193.978130] env[62816]: _type = "Task" [ 2193.978130] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.985274] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789537, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.487424] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789537, 'name': Rename_Task, 'duration_secs': 0.13125} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.487700] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2194.487924] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a724965-dd11-4ea3-b09d-1cb0d85107fc {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.494113] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2194.494113] env[62816]: value = "task-1789538" [ 2194.494113] env[62816]: _type = "Task" [ 2194.494113] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2194.501139] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789538, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.892602] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2194.892833] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2194.893066] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2195.004593] env[62816]: DEBUG oslo_vmware.api [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789538, 'name': PowerOnVM_Task, 'duration_secs': 0.435937} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2195.005077] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2195.005077] env[62816]: INFO nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Took 6.59 seconds to spawn the instance on the hypervisor. [ 2195.005272] env[62816]: DEBUG nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2195.006042] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af788ade-ee77-4568-bfaf-6f7ffc1ebc63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.523910] env[62816]: INFO nova.compute.manager [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Took 11.34 seconds to build instance. 
[ 2196.026142] env[62816]: DEBUG oslo_concurrency.lockutils [None req-031fb374-a0b7-49ba-bf45-91832e553894 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.853s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.532307] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "b27c5b09-d7e4-475c-9848-bab1375075b2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.532614] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.532816] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "b27c5b09-d7e4-475c-9848-bab1375075b2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2196.533018] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.533247] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.535809] env[62816]: INFO nova.compute.manager [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Terminating instance [ 2196.537597] env[62816]: DEBUG nova.compute.manager [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2196.537790] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2196.538626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2132a6a6-5882-49bc-a171-dc783d24fd2f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.546094] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2196.546325] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b77647e1-7025-4ee1-bf87-62375b6ed6a8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.552912] env[62816]: DEBUG oslo_vmware.api [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2196.552912] env[62816]: value = "task-1789539" [ 2196.552912] env[62816]: _type = "Task" [ 2196.552912] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2196.561287] env[62816]: DEBUG oslo_vmware.api [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789539, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.063600] env[62816]: DEBUG oslo_vmware.api [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789539, 'name': PowerOffVM_Task, 'duration_secs': 0.198957} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.064030] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2197.064030] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2197.064293] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fc4b802-9400-448b-8493-864cee236c0a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.183701] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2197.183943] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2197.184158] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] b27c5b09-d7e4-475c-9848-bab1375075b2 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2197.184442] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9beef36-6eaa-4c30-8c78-0d7a0736326c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.190900] env[62816]: DEBUG oslo_vmware.api [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2197.190900] env[62816]: value = "task-1789541" [ 2197.190900] env[62816]: _type = "Task" [ 2197.190900] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2197.198646] env[62816]: DEBUG oslo_vmware.api [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789541, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2197.700017] env[62816]: DEBUG oslo_vmware.api [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789541, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146608} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2197.700225] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2197.700422] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2197.700604] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2197.700779] env[62816]: INFO nova.compute.manager [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2197.701044] env[62816]: DEBUG oslo.service.loopingcall [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2197.701245] env[62816]: DEBUG nova.compute.manager [-] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2197.701345] env[62816]: DEBUG nova.network.neutron [-] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2197.936937] env[62816]: DEBUG nova.compute.manager [req-9b37022d-6d2b-4f18-9e2d-fa06d993f344 req-4d260e79-c7b4-4f25-a0dc-35d372a94197 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Received event network-vif-deleted-4a9eddf4-57bf-4bca-a8f6-3bec1edbc941 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2197.937075] env[62816]: INFO nova.compute.manager [req-9b37022d-6d2b-4f18-9e2d-fa06d993f344 req-4d260e79-c7b4-4f25-a0dc-35d372a94197 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Neutron deleted interface 4a9eddf4-57bf-4bca-a8f6-3bec1edbc941; detaching it from the instance and deleting it from the info cache [ 2197.937255] env[62816]: DEBUG nova.network.neutron [req-9b37022d-6d2b-4f18-9e2d-fa06d993f344 req-4d260e79-c7b4-4f25-a0dc-35d372a94197 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2198.410049] env[62816]: DEBUG nova.network.neutron [-] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 2198.440218] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-89c6d75b-7acb-49bc-a000-d0df58967d2c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.449629] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e15aa7-f3d5-4598-8f62-5b529dded946 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.474084] env[62816]: DEBUG nova.compute.manager [req-9b37022d-6d2b-4f18-9e2d-fa06d993f344 req-4d260e79-c7b4-4f25-a0dc-35d372a94197 service nova] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Detach interface failed, port_id=4a9eddf4-57bf-4bca-a8f6-3bec1edbc941, reason: Instance b27c5b09-d7e4-475c-9848-bab1375075b2 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2198.911587] env[62816]: INFO nova.compute.manager [-] [instance: b27c5b09-d7e4-475c-9848-bab1375075b2] Took 1.21 seconds to deallocate network for instance. [ 2199.419181] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2199.419527] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2199.419774] env[62816]: DEBUG nova.objects.instance [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid b27c5b09-d7e4-475c-9848-bab1375075b2 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2199.973432] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8dfe23-3752-4732-93f8-91c66b55d531 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.980760] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a19f808-5dba-4a13-afc9-f1062213892e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.009830] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29881e2a-0042-4ddf-9c0e-65ada2a4aa39 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.016381] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50100560-21e1-4109-b045-548c7e0806a5 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.028875] env[62816]: DEBUG nova.compute.provider_tree [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2200.532124] env[62816]: DEBUG nova.scheduler.client.report [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2201.037392] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.060319] env[62816]: INFO nova.scheduler.client.report [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance b27c5b09-d7e4-475c-9848-bab1375075b2 [ 2201.567521] env[62816]: DEBUG oslo_concurrency.lockutils [None req-d78c1acb-ca1a-4b6f-8b80-4422d3598afa tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "b27c5b09-d7e4-475c-9848-bab1375075b2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.035s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.992843] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "3e412acc-287a-4e66-b4f4-28d74de49d8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2201.993098] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2201.993320] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "3e412acc-287a-4e66-b4f4-28d74de49d8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2201.993517] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 
tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2201.993688] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.995908] env[62816]: INFO nova.compute.manager [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Terminating instance [ 2201.997624] env[62816]: DEBUG nova.compute.manager [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2201.997818] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2201.998657] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbacd9d-17a1-484c-8094-c9f104925e6b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.006272] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2202.006489] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ba59069-0814-4715-a83c-9ec71fa0e049 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.013160] env[62816]: DEBUG oslo_vmware.api [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2202.013160] env[62816]: value = "task-1789542" [ 2202.013160] env[62816]: _type = "Task" [ 2202.013160] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.020458] env[62816]: DEBUG oslo_vmware.api [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789542, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.523483] env[62816]: DEBUG oslo_vmware.api [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789542, 'name': PowerOffVM_Task, 'duration_secs': 0.173727} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.523758] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2202.523931] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2202.524195] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab585125-52c3-4a0d-a4e9-343ca9665c59 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.596042] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2202.596438] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2202.596483] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] 3e412acc-287a-4e66-b4f4-28d74de49d8d {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2202.596721] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-04e7b502-0433-42f6-897b-96805724b71b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.603209] env[62816]: DEBUG oslo_vmware.api [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2202.603209] env[62816]: value = "task-1789544" [ 2202.603209] env[62816]: _type = "Task" [ 2202.603209] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.610665] env[62816]: DEBUG oslo_vmware.api [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789544, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.113604] env[62816]: DEBUG oslo_vmware.api [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789544, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126377} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.113863] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2203.114067] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2203.114256] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2203.114435] env[62816]: INFO nova.compute.manager [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2203.114677] env[62816]: DEBUG oslo.service.loopingcall [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2203.114871] env[62816]: DEBUG nova.compute.manager [-] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2203.114965] env[62816]: DEBUG nova.network.neutron [-] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2203.332583] env[62816]: DEBUG nova.compute.manager [req-6b5626e4-7e76-4f4f-b474-481f05bb4c2e req-6475d1f8-7060-43b5-8147-25c1692cd2be service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Received event network-vif-deleted-90b50828-9370-443f-83e2-ba2bd00e40c6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2203.332776] env[62816]: INFO nova.compute.manager [req-6b5626e4-7e76-4f4f-b474-481f05bb4c2e req-6475d1f8-7060-43b5-8147-25c1692cd2be service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Neutron deleted interface 90b50828-9370-443f-83e2-ba2bd00e40c6; detaching it from the instance and deleting it from the info cache [ 2203.332926] env[62816]: DEBUG nova.network.neutron [req-6b5626e4-7e76-4f4f-b474-481f05bb4c2e req-6475d1f8-7060-43b5-8147-25c1692cd2be service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2203.813462] env[62816]: DEBUG nova.network.neutron [-] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2203.835598] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b882f21-2533-4729-a90f-d861fdb94980 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.845309] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51439d15-07e3-4196-8d83-cbfed8beefaa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.870076] env[62816]: DEBUG nova.compute.manager [req-6b5626e4-7e76-4f4f-b474-481f05bb4c2e req-6475d1f8-7060-43b5-8147-25c1692cd2be service nova] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Detach interface failed, port_id=90b50828-9370-443f-83e2-ba2bd00e40c6, reason: Instance 3e412acc-287a-4e66-b4f4-28d74de49d8d could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2204.316536] env[62816]: INFO nova.compute.manager [-] [instance: 3e412acc-287a-4e66-b4f4-28d74de49d8d] Took 1.20 seconds to deallocate network for instance. 
[ 2204.823175] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2204.823782] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2204.823782] env[62816]: DEBUG nova.objects.instance [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid 3e412acc-287a-4e66-b4f4-28d74de49d8d {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2205.365774] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e2685a-a8e7-484e-88ed-d6ad3f0364be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.372839] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558dc57c-954b-4ac3-9751-d8c509d593f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.401956] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5c0894-c830-4658-8a6c-57f61d9b4185 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.408828] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9386d406-0f9a-4861-adf8-198fc9327049 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.421408] env[62816]: DEBUG nova.compute.provider_tree [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2205.941180] env[62816]: ERROR nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [req-2e21c7aa-ce69-4388-bed6-760def4f7b0e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 
'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2e21c7aa-ce69-4388-bed6-760def4f7b0e"}]} [ 2205.957032] env[62816]: DEBUG nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2205.969918] env[62816]: DEBUG nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2205.970148] env[62816]: DEBUG nova.compute.provider_tree [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2205.980094] env[62816]: DEBUG nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2205.997068] env[62816]: DEBUG nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2206.029474] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75ebc8a-e800-46e9-9cbd-7394e45af128 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.036478] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-17e7b46f-91c1-4370-825a-8286c8f69ba7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.066253] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05672d98-deda-46c3-b9e4-2546e0aa02bf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.073282] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328b109e-ee37-46f9-b198-c5440d34beca {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.086075] env[62816]: DEBUG nova.compute.provider_tree [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2206.613855] env[62816]: DEBUG nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 181 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2206.614141] env[62816]: DEBUG nova.compute.provider_tree [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 181 to 182 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2206.614330] env[62816]: DEBUG nova.compute.provider_tree [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2207.119534] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.296s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.137600] env[62816]: INFO nova.scheduler.client.report [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance 3e412acc-287a-4e66-b4f4-28d74de49d8d [ 2207.644693] env[62816]: DEBUG oslo_concurrency.lockutils [None req-58e242ee-7f6e-47ce-a21f-631516d2ecc6 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "3e412acc-287a-4e66-b4f4-28d74de49d8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.652s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.930340] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "cd337b6c-97c9-4f88-8c16-d0ae40549426" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.930630] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.432712] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2210.019392] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2210.019711] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2210.021257] env[62816]: INFO nova.compute.claims [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2211.065246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a1b4aa-db2b-4ce6-bbb2-55bed5d6d2aa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.073139] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b3ca8b-c4b3-4061-97ed-88457e928a94 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.102160] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dade49b-f978-4534-8681-d5c6f7ce31be {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.109095] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153b9f38-0270-4894-96ff-6273ed002884 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.121525] env[62816]: DEBUG nova.compute.provider_tree [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2211.624922] env[62816]: DEBUG nova.scheduler.client.report [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2212.130618] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.111s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2212.131174] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2212.637202] env[62816]: DEBUG nova.compute.utils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2212.638907] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2212.639094] env[62816]: DEBUG nova.network.neutron [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2212.687408] env[62816]: DEBUG nova.policy [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2212.941820] env[62816]: DEBUG nova.network.neutron [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Successfully created port: 31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2213.141743] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2214.151050] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2214.179201] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2214.179466] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2214.179621] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2214.179802] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2214.179945] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2214.180106] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2214.180319] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2214.180477] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2214.180644] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2214.180806] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2214.180977] env[62816]: DEBUG nova.virt.hardware [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2214.181877] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0ec837-ffdc-44dc-a09b-39c3893bb297 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.190113] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a847aae9-e2d0-4a73-b10c-131f600d21e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.310369] env[62816]: DEBUG nova.compute.manager [req-874a4662-6665-4442-88cf-d4b500010eb4 req-f4598a05-00b6-42ca-9bfa-fca429704901 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Received event network-vif-plugged-31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2214.310730] env[62816]: DEBUG oslo_concurrency.lockutils [req-874a4662-6665-4442-88cf-d4b500010eb4 req-f4598a05-00b6-42ca-9bfa-fca429704901 service nova] Acquiring lock "cd337b6c-97c9-4f88-8c16-d0ae40549426-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.311070] env[62816]: DEBUG oslo_concurrency.lockutils [req-874a4662-6665-4442-88cf-d4b500010eb4 req-f4598a05-00b6-42ca-9bfa-fca429704901 service nova] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.311356] env[62816]: DEBUG oslo_concurrency.lockutils [req-874a4662-6665-4442-88cf-d4b500010eb4 req-f4598a05-00b6-42ca-9bfa-fca429704901 service nova] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.311642] env[62816]: DEBUG nova.compute.manager [req-874a4662-6665-4442-88cf-d4b500010eb4 req-f4598a05-00b6-42ca-9bfa-fca429704901 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] No waiting events found dispatching network-vif-plugged-31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2214.311916] env[62816]: WARNING nova.compute.manager [req-874a4662-6665-4442-88cf-d4b500010eb4 req-f4598a05-00b6-42ca-9bfa-fca429704901 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] 
Received unexpected event network-vif-plugged-31d7ad99-b27a-4891-9809-d3674703c684 for instance with vm_state building and task_state spawning. [ 2214.438090] env[62816]: DEBUG nova.network.neutron [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Successfully updated port: 31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2214.941273] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-cd337b6c-97c9-4f88-8c16-d0ae40549426" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2214.941461] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-cd337b6c-97c9-4f88-8c16-d0ae40549426" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2214.941621] env[62816]: DEBUG nova.network.neutron [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2215.472307] env[62816]: DEBUG nova.network.neutron [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2215.588913] env[62816]: DEBUG nova.network.neutron [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Updating instance_info_cache with network_info: [{"id": "31d7ad99-b27a-4891-9809-d3674703c684", "address": "fa:16:3e:c9:17:c4", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31d7ad99-b2", "ovs_interfaceid": "31d7ad99-b27a-4891-9809-d3674703c684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.091910] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-cd337b6c-97c9-4f88-8c16-d0ae40549426" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.092353] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Instance network_info: |[{"id": "31d7ad99-b27a-4891-9809-d3674703c684", "address": "fa:16:3e:c9:17:c4", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31d7ad99-b2", "ovs_interfaceid": "31d7ad99-b27a-4891-9809-d3674703c684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2216.092811] env[62816]: 
DEBUG nova.virt.vmwareapi.vmops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:17:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31d7ad99-b27a-4891-9809-d3674703c684', 'vif_model': 'vmxnet3'}] {{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2216.100798] env[62816]: DEBUG oslo.service.loopingcall [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.101014] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2216.101240] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81bebab5-6a91-4ece-9f16-c6fbf741fec1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.121388] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2216.121388] env[62816]: value = "task-1789545" [ 2216.121388] env[62816]: _type = "Task" [ 2216.121388] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.128484] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789545, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.336079] env[62816]: DEBUG nova.compute.manager [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Received event network-changed-31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2216.336310] env[62816]: DEBUG nova.compute.manager [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Refreshing instance network info cache due to event network-changed-31d7ad99-b27a-4891-9809-d3674703c684. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2216.336544] env[62816]: DEBUG oslo_concurrency.lockutils [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] Acquiring lock "refresh_cache-cd337b6c-97c9-4f88-8c16-d0ae40549426" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.336692] env[62816]: DEBUG oslo_concurrency.lockutils [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] Acquired lock "refresh_cache-cd337b6c-97c9-4f88-8c16-d0ae40549426" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.336852] env[62816]: DEBUG nova.network.neutron [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Refreshing network info cache for port 31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2216.630713] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789545, 'name': CreateVM_Task, 'duration_secs': 0.296742} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2216.631130] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2216.631718] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.631947] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.632378] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2216.632675] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d504378-e8e4-4926-b92e-028b33d5cd46 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.637193] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2216.637193] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]5292fb42-7cca-92c5-71e8-bc8ea7bd76d3" [ 2216.637193] env[62816]: _type = "Task" [ 2216.637193] env[62816]: } to complete. 
{{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.644226] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5292fb42-7cca-92c5-71e8-bc8ea7bd76d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.017917] env[62816]: DEBUG nova.network.neutron [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Updated VIF entry in instance network info cache for port 31d7ad99-b27a-4891-9809-d3674703c684. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2217.018047] env[62816]: DEBUG nova.network.neutron [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Updating instance_info_cache with network_info: [{"id": "31d7ad99-b27a-4891-9809-d3674703c684", "address": "fa:16:3e:c9:17:c4", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31d7ad99-b2", "ovs_interfaceid": "31d7ad99-b27a-4891-9809-d3674703c684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.147730] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]5292fb42-7cca-92c5-71e8-bc8ea7bd76d3, 'name': SearchDatastore_Task, 'duration_secs': 0.010614} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.147969] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.148225] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2217.148460] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2217.148611] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.148790] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2217.149060] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3344376a-36db-406c-8046-b6b176db168f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.156868] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2217.157050] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2217.157713] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58408a3a-4a48-4466-8b5c-5ecbafd79079 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.162690] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2217.162690] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52b322e4-6bdf-3b49-a4c8-e361c215b485" [ 2217.162690] env[62816]: _type = "Task" [ 2217.162690] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.169687] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b322e4-6bdf-3b49-a4c8-e361c215b485, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.520702] env[62816]: DEBUG oslo_concurrency.lockutils [req-95baa4bf-ba80-4853-b32e-f775bf9fb69e req-dc7b88ed-c807-42a1-b2c5-771ecf5be5e0 service nova] Releasing lock "refresh_cache-cd337b6c-97c9-4f88-8c16-d0ae40549426" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2217.673239] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52b322e4-6bdf-3b49-a4c8-e361c215b485, 'name': SearchDatastore_Task, 'duration_secs': 0.008302} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2217.674015] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf40d3ae-ccf9-4cbf-90eb-a2c37e98ddc9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.678672] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2217.678672] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52697ce1-f5cd-ed65-0eae-bb77c5137ef5" [ 2217.678672] env[62816]: _type = "Task" [ 2217.678672] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2217.685538] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52697ce1-f5cd-ed65-0eae-bb77c5137ef5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.188690] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52697ce1-f5cd-ed65-0eae-bb77c5137ef5, 'name': SearchDatastore_Task, 'duration_secs': 0.009094} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.188969] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.189248] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] cd337b6c-97c9-4f88-8c16-d0ae40549426/cd337b6c-97c9-4f88-8c16-d0ae40549426.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2218.189555] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e700bb0e-648e-4da5-b799-bf03d4d1ece0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.196674] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2218.196674] env[62816]: value = "task-1789546" [ 2218.196674] env[62816]: _type = "Task" [ 2218.196674] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.203798] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789546, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.705960] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789546, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.445048} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.706375] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] cd337b6c-97c9-4f88-8c16-d0ae40549426/cd337b6c-97c9-4f88-8c16-d0ae40549426.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2218.706458] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2218.706650] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c79ea726-733a-496b-bacd-64d9a56a131d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.713184] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2218.713184] env[62816]: value = "task-1789547" [ 2218.713184] env[62816]: _type = "Task" [ 2218.713184] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.720384] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789547, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.223376] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05875} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.223649] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2219.224387] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f9315c-076c-413c-abfe-229a455d3526 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.246542] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] cd337b6c-97c9-4f88-8c16-d0ae40549426/cd337b6c-97c9-4f88-8c16-d0ae40549426.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2219.246777] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8688c059-7083-4e38-bf2b-d9758b719d51 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.265311] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2219.265311] env[62816]: value = "task-1789548" [ 2219.265311] env[62816]: _type = "Task" [ 2219.265311] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.272270] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789548, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.775962] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789548, 'name': ReconfigVM_Task, 'duration_secs': 0.281916} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.776381] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Reconfigured VM instance instance-00000078 to attach disk [datastore1] cd337b6c-97c9-4f88-8c16-d0ae40549426/cd337b6c-97c9-4f88-8c16-d0ae40549426.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2219.776856] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b105cb8b-59a7-4cb4-a6e9-444e8a83dcc7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.783344] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2219.783344] env[62816]: value = "task-1789549" [ 2219.783344] env[62816]: _type = "Task" [ 2219.783344] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.790318] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789549, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.293786] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789549, 'name': Rename_Task, 'duration_secs': 0.128715} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.294080] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2220.294320] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-22aef063-ec50-4f2c-92e1-aa884e981f88 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.300182] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2220.300182] env[62816]: value = "task-1789550" [ 2220.300182] env[62816]: _type = "Task" [ 2220.300182] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.308325] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789550, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.810222] env[62816]: DEBUG oslo_vmware.api [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789550, 'name': PowerOnVM_Task, 'duration_secs': 0.427293} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.810587] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2220.810666] env[62816]: INFO nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Took 6.66 seconds to spawn the instance on the hypervisor. [ 2220.810847] env[62816]: DEBUG nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2220.811626] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed0d056-2a65-4461-acca-37fe44e16fbd {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2221.328996] env[62816]: INFO nova.compute.manager [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Took 11.39 seconds to build instance. 
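The spawn sequence recorded above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) repeats the same wait-for-task pattern each time: submit a vCenter task, poll its progress, and log duration_secs once it completes. The snippet below is a minimal, self-contained Python sketch of that polling loop only; it does not use oslo.vmware, and FakeTask, the poll interval, and the timeout are illustrative assumptions rather than Nova's actual implementation.

# Illustrative only: a stdlib re-implementation of the "wait for task" polling
# pattern visible in the records above. The task object, poll interval and
# timeout are assumptions; oslo.vmware's real API is not used here.
import time
from dataclasses import dataclass, field


@dataclass
class FakeTask:
    """Stand-in for a vCenter task handle such as 'task-1789545'."""
    task_id: str
    _steps: list = field(default_factory=lambda: [0, 40, 100])

    def poll(self) -> int:
        """Return current progress (0-100); advances one step per call."""
        return self._steps.pop(0) if len(self._steps) > 1 else 100


def wait_for_task(task: FakeTask, poll_interval: float = 0.5,
                  timeout: float = 30.0) -> float:
    """Poll until the task reports 100% progress; return elapsed seconds."""
    start = time.monotonic()
    while True:
        progress = task.poll()
        print(f"Task: {task.task_id} progress is {progress}%.")
        if progress >= 100:
            return time.monotonic() - start
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"{task.task_id} did not finish in {timeout}s")
        time.sleep(poll_interval)


if __name__ == "__main__":
    duration = wait_for_task(FakeTask("task-1789545"), poll_interval=0.1)
    print(f"completed successfully, duration_secs={duration:.3f}")
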
[ 2221.831092] env[62816]: DEBUG oslo_concurrency.lockutils [None req-1527bc77-b415-418d-8189-02da10c13104 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.900s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2222.232478] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "cd337b6c-97c9-4f88-8c16-d0ae40549426" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2222.232702] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.232911] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "cd337b6c-97c9-4f88-8c16-d0ae40549426-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2222.233114] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2222.233284] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2222.235817] env[62816]: INFO nova.compute.manager [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Terminating instance [ 2222.237526] env[62816]: DEBUG nova.compute.manager [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2222.237722] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2222.238555] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582d3f7e-7358-4d51-810b-b16f95367b74 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.246342] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2222.246555] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7c53b6d-2a6d-42b4-b9a3-9f76aeabfaac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.252110] env[62816]: DEBUG oslo_vmware.api [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2222.252110] env[62816]: value = "task-1789551" [ 2222.252110] env[62816]: _type = "Task" [ 2222.252110] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.259131] env[62816]: DEBUG oslo_vmware.api [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789551, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.763622] env[62816]: DEBUG oslo_vmware.api [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789551, 'name': PowerOffVM_Task, 'duration_secs': 0.179436} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.763885] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2222.764066] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2222.764307] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7b7a092-a68e-427a-be53-1174234e23d7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.834811] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2222.835223] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2222.835223] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] cd337b6c-97c9-4f88-8c16-d0ae40549426 {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2222.835484] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0863b34d-79b5-4360-a27d-f0f8927bb6b0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.841552] env[62816]: DEBUG oslo_vmware.api [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2222.841552] env[62816]: value = "task-1789553" [ 2222.841552] env[62816]: _type = "Task" [ 2222.841552] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.848850] env[62816]: DEBUG oslo_vmware.api [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789553, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.351372] env[62816]: DEBUG oslo_vmware.api [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172847} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.351629] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2223.351814] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2223.351992] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2223.352187] env[62816]: INFO nova.compute.manager [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2223.352433] env[62816]: DEBUG oslo.service.loopingcall [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2223.352623] env[62816]: DEBUG nova.compute.manager [-] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2223.352718] env[62816]: DEBUG nova.network.neutron [-] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2223.570168] env[62816]: DEBUG nova.compute.manager [req-e552c6f3-ad2c-41dc-b07e-9fe05cc50ee0 req-08091fb2-7f82-4cdc-99a7-823fb719b475 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Received event network-vif-deleted-31d7ad99-b27a-4891-9809-d3674703c684 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2223.570363] env[62816]: INFO nova.compute.manager [req-e552c6f3-ad2c-41dc-b07e-9fe05cc50ee0 req-08091fb2-7f82-4cdc-99a7-823fb719b475 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Neutron deleted interface 31d7ad99-b27a-4891-9809-d3674703c684; detaching it from the instance and deleting it from the info cache [ 2223.570570] env[62816]: DEBUG nova.network.neutron [req-e552c6f3-ad2c-41dc-b07e-9fe05cc50ee0 req-08091fb2-7f82-4cdc-99a7-823fb719b475 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2224.047531] env[62816]: DEBUG nova.network.neutron [-] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
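The teardown recorded above runs in a fixed order: PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, then network deallocation wrapped in the _deallocate_network_with_retries looping call. The sketch below is an illustrative stand-in for that ordering with a simple retry loop; FakeDriver, the simulated transient failure, and the retry counts are assumptions and do not reflect Nova's real driver interface.

# Illustrative only: the teardown order seen in the records above, with a
# small retry loop standing in for _deallocate_network_with_retries.
# FakeDriver, the failure simulation and the retry counts are assumptions.
import time


class FakeDriver:
    """Stand-in for the vmwareapi driver calls logged above."""

    def power_off(self, instance):
        print(f"[instance: {instance}] Powered off the VM")

    def unregister(self, instance):
        print(f"[instance: {instance}] Unregistered the VM")

    def delete_files(self, instance):
        print(f"[instance: {instance}] Deleted contents of the VM from datastore")


def deallocate_network_with_retries(instance, attempts=3, delay=0.1):
    """Retry deallocation a few times, as the looping call above does."""
    for attempt in range(1, attempts + 1):
        try:
            if attempt == 1:
                # Simulate one transient Neutron hiccup so the retry path runs.
                raise ConnectionError("neutron temporarily unavailable")
            print(f"[instance: {instance}] Deallocating network for instance")
            return
        except ConnectionError:
            if attempt == attempts:
                raise
            time.sleep(delay)


def terminate_instance(driver, instance):
    """Destroy the VM, then release its network resources."""
    driver.power_off(instance)
    driver.unregister(instance)
    driver.delete_files(instance)
    deallocate_network_with_retries(instance)


if __name__ == "__main__":
    terminate_instance(FakeDriver(), "cd337b6c-97c9-4f88-8c16-d0ae40549426")
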
[ 2224.072354] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7bac75f8-47c1-4e5b-93fe-b071f8f02857 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.082367] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa28f3d9-2b54-4790-95a0-530306c906a9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.104913] env[62816]: DEBUG nova.compute.manager [req-e552c6f3-ad2c-41dc-b07e-9fe05cc50ee0 req-08091fb2-7f82-4cdc-99a7-823fb719b475 service nova] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Detach interface failed, port_id=31d7ad99-b27a-4891-9809-d3674703c684, reason: Instance cd337b6c-97c9-4f88-8c16-d0ae40549426 could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2224.549722] env[62816]: INFO nova.compute.manager [-] [instance: cd337b6c-97c9-4f88-8c16-d0ae40549426] Took 1.20 seconds to deallocate network for instance. [ 2225.055907] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2225.056231] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2225.056437] env[62816]: DEBUG nova.objects.instance [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid cd337b6c-97c9-4f88-8c16-d0ae40549426 {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2225.599530] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73a5803-a927-4ca5-af6d-3d03daf7a692 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.607506] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d088052-9f61-45e9-89e2-658fe28a8593 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.638054] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e065ed58-d4a4-47a9-9e38-c9a5000827e6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.645310] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a800d9d-04d9-4d2c-9bc7-3513a6b59c37 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2225.658152] env[62816]: DEBUG nova.compute.provider_tree [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2226.177359] env[62816]: ERROR nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [req-9e5c37ea-cffe-413f-9cd5-8b5359636e0b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9e5c37ea-cffe-413f-9cd5-8b5359636e0b"}]} [ 2226.192827] env[62816]: DEBUG nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2226.205222] env[62816]: DEBUG nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2226.205466] env[62816]: DEBUG nova.compute.provider_tree [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2226.214876] env[62816]: DEBUG nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2226.230882] env[62816]: DEBUG nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2226.263197] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ec5b1f-daa0-4c6d-b077-b24968c17a63 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.273096] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33b03ed7-55cb-4b63-b91f-f5bc1fee9d00 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.315941] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0cd9ac-0c7b-49b5-9115-ea9ce80bbed8 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.322887] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4501d88-d68f-47aa-834a-04d6b3840b11 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.336455] env[62816]: DEBUG nova.compute.provider_tree [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2226.866848] env[62816]: DEBUG nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 183 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2226.867136] env[62816]: DEBUG nova.compute.provider_tree [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 183 to 184 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2226.867321] env[62816]: DEBUG nova.compute.provider_tree [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2227.372195] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.316s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.389805] env[62816]: INFO nova.scheduler.client.report [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance cd337b6c-97c9-4f88-8c16-d0ae40549426 [ 2227.898865] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7cd78a85-942c-4b7c-be6f-91074c07a11f tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "cd337b6c-97c9-4f88-8c16-d0ae40549426" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.666s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2229.096931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.097228] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.599590] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Starting instance... 
{{(pid=62816) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2230.120257] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.120521] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.121967] env[62816]: INFO nova.compute.claims [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2231.166203] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c551f7-1021-4e3c-add1-03a418a54f44 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.173615] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653afb7e-8934-40e9-bf9d-12ee86bff6f1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.202989] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e415705d-a651-4ffa-8de5-e710dd934a5f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.209711] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b244bb-93dc-46f2-9b67-ca4f9c260cdf {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.222016] env[62816]: DEBUG nova.compute.provider_tree [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2231.725731] env[62816]: DEBUG nova.scheduler.client.report [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2232.230483] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.110s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2232.231042] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Start building networks asynchronously for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2232.735661] env[62816]: DEBUG nova.compute.utils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Using /dev/sd instead of None {{(pid=62816) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2232.737078] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Allocating IP information in the background. {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2232.737250] env[62816]: DEBUG nova.network.neutron [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] allocate_for_instance() {{(pid=62816) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2232.787176] env[62816]: DEBUG nova.policy [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bef57e7a275d4b43a65fb4ed7c238ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3b1be6e60f4c55be156abede3ea8ce', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62816) authorize /opt/stack/nova/nova/policy.py:201}} [ 2233.030927] env[62816]: DEBUG nova.network.neutron [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Successfully created port: b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2233.241285] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Start building block device mappings for instance. {{(pid=62816) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2234.252575] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Start spawning the instance on the hypervisor. 
{{(pid=62816) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2234.278279] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-12T02:35:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-12T02:35:32Z,direct_url=,disk_format='vmdk',id=844838ed-b150-482e-a0f6-dcce37470b52,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='b785e717cfe540028c6aa1636fe2ce35',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-12T02:35:32Z,virtual_size=,visibility=), allow threads: False {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2234.278546] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2234.278705] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image limits 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2234.278885] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Flavor pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2234.279041] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Image pref 0:0:0 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2234.279198] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62816) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2234.279407] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2234.279567] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2234.279732] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Got 1 possible topologies {{(pid=62816) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2234.279895] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2234.280080] env[62816]: DEBUG nova.virt.hardware [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62816) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2234.280934] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c85085e-0a26-4c81-a1ac-f940a040a437 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.288748] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38532156-5486-4e9f-a255-c9759896674f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.972020] env[62816]: DEBUG nova.network.neutron [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Successfully updated port: b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2234.974709] env[62816]: DEBUG nova.compute.manager [req-ff14c752-f18c-4cff-9707-2ada8a2ca535 req-42edc173-2fdc-424d-9cb6-ba0a6457636a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Received event network-vif-plugged-b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2234.974924] env[62816]: DEBUG oslo_concurrency.lockutils [req-ff14c752-f18c-4cff-9707-2ada8a2ca535 req-42edc173-2fdc-424d-9cb6-ba0a6457636a service nova] Acquiring lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2234.975166] env[62816]: DEBUG oslo_concurrency.lockutils [req-ff14c752-f18c-4cff-9707-2ada8a2ca535 req-42edc173-2fdc-424d-9cb6-ba0a6457636a service nova] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2234.975358] env[62816]: DEBUG oslo_concurrency.lockutils [req-ff14c752-f18c-4cff-9707-2ada8a2ca535 req-42edc173-2fdc-424d-9cb6-ba0a6457636a service nova] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2234.975532] env[62816]: DEBUG nova.compute.manager [req-ff14c752-f18c-4cff-9707-2ada8a2ca535 req-42edc173-2fdc-424d-9cb6-ba0a6457636a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] No 
waiting events found dispatching network-vif-plugged-b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2234.975696] env[62816]: WARNING nova.compute.manager [req-ff14c752-f18c-4cff-9707-2ada8a2ca535 req-42edc173-2fdc-424d-9cb6-ba0a6457636a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Received unexpected event network-vif-plugged-b5d5cf67-0885-45e1-a840-a7e0f72549a2 for instance with vm_state building and task_state spawning. [ 2235.477781] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "refresh_cache-7331a0d0-7cd1-4627-93bd-c7680c5ff66c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2235.478154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "refresh_cache-7331a0d0-7cd1-4627-93bd-c7680c5ff66c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2235.478154] env[62816]: DEBUG nova.network.neutron [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Building network info cache for instance {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2236.009882] env[62816]: DEBUG nova.network.neutron [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Instance cache missing network info. 
{{(pid=62816) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2236.146739] env[62816]: DEBUG nova.network.neutron [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Updating instance_info_cache with network_info: [{"id": "b5d5cf67-0885-45e1-a840-a7e0f72549a2", "address": "fa:16:3e:8d:ef:5b", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5d5cf67-08", "ovs_interfaceid": "b5d5cf67-0885-45e1-a840-a7e0f72549a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.420720] env[62816]: DEBUG nova.compute.manager [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Received event network-changed-b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2236.420982] env[62816]: DEBUG nova.compute.manager [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Refreshing instance network info cache due to event network-changed-b5d5cf67-0885-45e1-a840-a7e0f72549a2. 
{{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2236.421235] env[62816]: DEBUG oslo_concurrency.lockutils [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] Acquiring lock "refresh_cache-7331a0d0-7cd1-4627-93bd-c7680c5ff66c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2236.649918] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "refresh_cache-7331a0d0-7cd1-4627-93bd-c7680c5ff66c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2236.650310] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Instance network_info: |[{"id": "b5d5cf67-0885-45e1-a840-a7e0f72549a2", "address": "fa:16:3e:8d:ef:5b", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5d5cf67-08", "ovs_interfaceid": "b5d5cf67-0885-45e1-a840-a7e0f72549a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62816) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2236.650588] env[62816]: DEBUG oslo_concurrency.lockutils [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] Acquired lock "refresh_cache-7331a0d0-7cd1-4627-93bd-c7680c5ff66c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2236.650774] env[62816]: DEBUG nova.network.neutron [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Refreshing network info cache for port b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2236.652110] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:ef:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f17856cf-7248-414b-bde6-8c90cfb4c593', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5d5cf67-0885-45e1-a840-a7e0f72549a2', 'vif_model': 'vmxnet3'}] 
{{(pid=62816) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2236.659529] env[62816]: DEBUG oslo.service.loopingcall [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2236.660481] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Creating VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2236.660714] env[62816]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-581d7153-41cb-4da7-9e7f-9169936af9d2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.680470] env[62816]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2236.680470] env[62816]: value = "task-1789554" [ 2236.680470] env[62816]: _type = "Task" [ 2236.680470] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2236.688154] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789554, 'name': CreateVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.189957] env[62816]: DEBUG oslo_vmware.api [-] Task: {'id': task-1789554, 'name': CreateVM_Task, 'duration_secs': 0.304193} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.192012] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Created VM on the ESX host {{(pid=62816) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2237.192942] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2237.193141] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2237.193479] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2237.193948] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-61dec8c2-6558-422d-8818-1736acd73030 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.198428] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 
tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2237.198428] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]521a5601-2cbe-fec1-6c2b-40c2b030f50c" [ 2237.198428] env[62816]: _type = "Task" [ 2237.198428] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.205804] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521a5601-2cbe-fec1-6c2b-40c2b030f50c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.514780] env[62816]: DEBUG nova.network.neutron [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Updated VIF entry in instance network info cache for port b5d5cf67-0885-45e1-a840-a7e0f72549a2. {{(pid=62816) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2237.515169] env[62816]: DEBUG nova.network.neutron [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Updating instance_info_cache with network_info: [{"id": "b5d5cf67-0885-45e1-a840-a7e0f72549a2", "address": "fa:16:3e:8d:ef:5b", "network": {"id": "b6172888-7d21-4d58-9af0-1456caa6536d", "bridge": "br-int", "label": "tempest-ServersTestJSON-664441236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fc3b1be6e60f4c55be156abede3ea8ce", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f17856cf-7248-414b-bde6-8c90cfb4c593", "external-id": "nsx-vlan-transportzone-341", "segmentation_id": 341, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5d5cf67-08", "ovs_interfaceid": "b5d5cf67-0885-45e1-a840-a7e0f72549a2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2237.708326] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]521a5601-2cbe-fec1-6c2b-40c2b030f50c, 'name': SearchDatastore_Task, 'duration_secs': 0.010207} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2237.708682] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2237.708804] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Processing image 844838ed-b150-482e-a0f6-dcce37470b52 {{(pid=62816) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2237.709015] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2237.709169] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquired lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2237.709358] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2237.709601] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63888071-6a7b-4136-ab00-e47f3df0cafe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.717234] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62816) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2237.717415] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62816) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2237.718027] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4245aa80-16ae-42af-8779-e222d29fd1b9 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2237.722859] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2237.722859] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]52625955-af3f-7887-1a66-4060e0de9f90" [ 2237.722859] env[62816]: _type = "Task" [ 2237.722859] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2237.729424] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52625955-af3f-7887-1a66-4060e0de9f90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.018363] env[62816]: DEBUG oslo_concurrency.lockutils [req-c27f0617-32db-4f41-97e1-764584d12804 req-1b96921d-f266-4baa-8435-1d293981820a service nova] Releasing lock "refresh_cache-7331a0d0-7cd1-4627-93bd-c7680c5ff66c" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2238.233630] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]52625955-af3f-7887-1a66-4060e0de9f90, 'name': SearchDatastore_Task, 'duration_secs': 0.008138} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2238.234388] env[62816]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c28144fd-e6a4-4024-a949-71846b17e84d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.239249] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2238.239249] env[62816]: value = "session[52166549-a417-fee9-199e-38636bfc0ddd]525f9a3c-b409-034e-ce00-99d596a5c661" [ 2238.239249] env[62816]: _type = "Task" [ 2238.239249] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.246298] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525f9a3c-b409-034e-ce00-99d596a5c661, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.749412] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': session[52166549-a417-fee9-199e-38636bfc0ddd]525f9a3c-b409-034e-ce00-99d596a5c661, 'name': SearchDatastore_Task, 'duration_secs': 0.009302} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2238.749756] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Releasing lock "[datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk" {{(pid=62816) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2238.749924] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 7331a0d0-7cd1-4627-93bd-c7680c5ff66c/7331a0d0-7cd1-4627-93bd-c7680c5ff66c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2238.750217] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-954da1f7-084d-4617-ab88-bb684e541f22 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.756493] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2238.756493] env[62816]: value = "task-1789555" [ 2238.756493] env[62816]: _type = "Task" [ 2238.756493] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2238.764623] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789555, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.265969] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476913} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.266264] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/844838ed-b150-482e-a0f6-dcce37470b52/844838ed-b150-482e-a0f6-dcce37470b52.vmdk to [datastore1] 7331a0d0-7cd1-4627-93bd-c7680c5ff66c/7331a0d0-7cd1-4627-93bd-c7680c5ff66c.vmdk {{(pid=62816) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2239.266480] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Extending root virtual disk to 1048576 {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2239.266728] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9973c49a-093b-4b3d-9353-7781ce3117b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.273320] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2239.273320] env[62816]: value = "task-1789556" [ 2239.273320] env[62816]: _type = "Task" [ 2239.273320] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.280350] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789556, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.783404] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060879} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.783808] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Extended root virtual disk {{(pid=62816) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2239.784512] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5f90d2-7e50-4076-ae58-c152b30ce2b3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.805423] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 7331a0d0-7cd1-4627-93bd-c7680c5ff66c/7331a0d0-7cd1-4627-93bd-c7680c5ff66c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2239.805673] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-65ad992f-b0ba-42f2-a562-bd2f53083ee7 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.824211] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2239.824211] env[62816]: value = "task-1789557" [ 2239.824211] env[62816]: _type = "Task" [ 2239.824211] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.831646] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789557, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.334185] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789557, 'name': ReconfigVM_Task, 'duration_secs': 0.245322} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2240.334460] env[62816]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 7331a0d0-7cd1-4627-93bd-c7680c5ff66c/7331a0d0-7cd1-4627-93bd-c7680c5ff66c.vmdk or device None with type sparse {{(pid=62816) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2240.335091] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-097a79ac-39d5-41f1-af4a-afb6dd2d2392 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2240.340838] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2240.340838] env[62816]: value = "task-1789558" [ 2240.340838] env[62816]: _type = "Task" [ 2240.340838] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2240.347762] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789558, 'name': Rename_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.850853] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789558, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.350943] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789558, 'name': Rename_Task} progress is 99%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.852208] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789558, 'name': Rename_Task, 'duration_secs': 1.111709} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2241.852604] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Powering on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2241.852733] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3920ac07-6923-461f-9bdf-4924f5e3c0da {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2241.859449] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2241.859449] env[62816]: value = "task-1789559" [ 2241.859449] env[62816]: _type = "Task" [ 2241.859449] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2241.867618] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789559, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.370414] env[62816]: DEBUG oslo_vmware.api [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789559, 'name': PowerOnVM_Task, 'duration_secs': 0.42226} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.370681] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Powered on the VM {{(pid=62816) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2242.370881] env[62816]: INFO nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Took 8.12 seconds to spawn the instance on the hypervisor. [ 2242.371068] env[62816]: DEBUG nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2242.371823] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adb251ed-1a25-4890-9c43-a360d203be49 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.888387] env[62816]: INFO nova.compute.manager [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Took 12.78 seconds to build instance. 
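The spawn sequence above follows the usual oslo.vmware pattern: each vCenter mutation (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a Task managed object, and the caller blocks while the library polls its progress, which is what the repeated `_poll_task` / "progress is N%" entries record. A minimal sketch of that invoke-then-wait pattern, outside of Nova's own code, is below; the vCenter host, credentials, `vm_ref` and the retry/poll values are placeholders, not values taken from this deployment.

```python
# Minimal sketch of the invoke-task / wait-for-task pattern seen in the log.
# Host, credentials and the VM managed-object reference are placeholders;
# only the oslo.vmware session calls themselves are real API.
from oslo_vmware import api as vmware_api


def power_on(vm_ref):
    # Create a vCenter session; retry count and poll interval are assumed values.
    session = vmware_api.VMwareAPISession(
        'vc.example.test',               # placeholder vCenter host
        'administrator@vsphere.local',   # placeholder user
        'secret',                        # placeholder password
        api_retry_count=10,
        task_poll_interval=0.5)
    try:
        # Invoking a *_Task vim method only starts the operation and returns
        # a Task moref; it does not wait for completion.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls TaskInfo (the "progress is N%" lines above)
        # and raises if the task finishes in an error state.
        session.wait_for_task(task)
    finally:
        session.logout()
```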
[ 2243.390147] env[62816]: DEBUG oslo_concurrency.lockutils [None req-9b5bb7a0-a16e-44f6-b67a-c5756f428131 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.293s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2243.505974] env[62816]: DEBUG oslo_concurrency.lockutils [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2243.506275] env[62816]: DEBUG oslo_concurrency.lockutils [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2243.506470] env[62816]: DEBUG nova.compute.manager [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2243.507721] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4ca19bc-7dac-40aa-9070-13bf750627c0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.516454] env[62816]: DEBUG nova.compute.manager [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62816) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2243.517197] env[62816]: DEBUG nova.objects.instance [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'flavor' on Instance uuid 7331a0d0-7cd1-4627-93bd-c7680c5ff66c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2244.022699] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2244.023103] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56f4e0f1-a05e-48e6-b19c-a2b88f54e9b1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.030339] env[62816]: DEBUG oslo_vmware.api [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: 
(returnval){ [ 2244.030339] env[62816]: value = "task-1789560" [ 2244.030339] env[62816]: _type = "Task" [ 2244.030339] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.038473] env[62816]: DEBUG oslo_vmware.api [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.540651] env[62816]: DEBUG oslo_vmware.api [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789560, 'name': PowerOffVM_Task, 'duration_secs': 0.198276} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.540917] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2244.541109] env[62816]: DEBUG nova.compute.manager [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Checking state {{(pid=62816) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2244.541838] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1984190-2e21-45d2-9693-92b121aba89a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.053154] env[62816]: DEBUG oslo_concurrency.lockutils [None req-55dcbd12-dfe3-4ade-a626-5fbfa3f2dc10 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.547s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2245.456259] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2246.136989] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.136989] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.136989] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.136989] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.137515] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.139240] env[62816]: INFO nova.compute.manager [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Terminating instance [ 2246.141035] env[62816]: DEBUG nova.compute.manager [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Start destroying the instance on the hypervisor. 
{{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2246.141259] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2246.142133] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7455c722-655d-421f-aaa2-d80c2c2472f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.149416] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2246.149909] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed49bd45-b624-4c93-afac-0b8c7132db82 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.497119] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2246.497352] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2246.497516] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] 7331a0d0-7cd1-4627-93bd-c7680c5ff66c {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2246.497781] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdf7a064-d3ee-4beb-8d36-5fc7622a42ac {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.504019] env[62816]: DEBUG oslo_vmware.api [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2246.504019] env[62816]: value = "task-1789562" [ 2246.504019] env[62816]: _type = "Task" [ 2246.504019] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.511855] env[62816]: DEBUG oslo_vmware.api [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789562, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2247.013593] env[62816]: DEBUG oslo_vmware.api [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789562, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168364} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2247.013857] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2247.014032] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2247.014212] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2247.014384] env[62816]: INFO nova.compute.manager [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Took 0.87 seconds to destroy the instance on the hypervisor. [ 2247.014620] env[62816]: DEBUG oslo.service.loopingcall [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2247.014807] env[62816]: DEBUG nova.compute.manager [-] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2247.014899] env[62816]: DEBUG nova.network.neutron [-] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2247.228174] env[62816]: DEBUG nova.compute.manager [req-af3f65b7-ea52-4dbf-b46f-43a6e6f8dcf5 req-6c33e687-a660-4744-8f49-ba0e7adca018 service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Received event network-vif-deleted-b5d5cf67-0885-45e1-a840-a7e0f72549a2 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2247.228440] env[62816]: INFO nova.compute.manager [req-af3f65b7-ea52-4dbf-b46f-43a6e6f8dcf5 req-6c33e687-a660-4744-8f49-ba0e7adca018 service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Neutron deleted interface b5d5cf67-0885-45e1-a840-a7e0f72549a2; detaching it from the instance and deleting it from the info cache [ 2247.228628] env[62816]: DEBUG nova.network.neutron [req-af3f65b7-ea52-4dbf-b46f-43a6e6f8dcf5 req-6c33e687-a660-4744-8f49-ba0e7adca018 service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2247.709189] env[62816]: DEBUG nova.network.neutron [-] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2247.730401] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4916b504-f303-4aad-a1d3-bd096e5da05e {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.739935] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4c22fc-210e-4331-92e4-8054386a756f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.764359] env[62816]: DEBUG nova.compute.manager [req-af3f65b7-ea52-4dbf-b46f-43a6e6f8dcf5 req-6c33e687-a660-4744-8f49-ba0e7adca018 service nova] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Detach interface failed, port_id=b5d5cf67-0885-45e1-a840-a7e0f72549a2, reason: Instance 7331a0d0-7cd1-4627-93bd-c7680c5ff66c could not be found. {{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2248.211488] env[62816]: INFO nova.compute.manager [-] [instance: 7331a0d0-7cd1-4627-93bd-c7680c5ff66c] Took 1.20 seconds to deallocate network for instance. [ 2248.456781] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2248.457216] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62816) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2248.457216] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager.update_available_resource {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2248.718056] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.718325] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.718555] env[62816]: DEBUG nova.objects.instance [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid 7331a0d0-7cd1-4627-93bd-c7680c5ff66c {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2248.960410] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2249.263401] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e8634a-2871-4c05-aa6d-a11aa0ee317c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.270640] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef91e650-583b-4f92-b1a0-8c6c1577dcfe {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.299585] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3219325-c43c-4f03-a644-6d4cd2f4b246 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.306121] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80f23651-ff35-4003-b451-2e4d19fb8ffa {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.318393] env[62816]: DEBUG nova.compute.provider_tree [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2249.838052] env[62816]: ERROR nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [req-c6c7e096-d071-4fc2-8fda-47e3fa02466e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 27f49c85-1bb9-4d17-a914-e2f45a5e84fa. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c6c7e096-d071-4fc2-8fda-47e3fa02466e"}]} [ 2249.855393] env[62816]: DEBUG nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing inventories for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 2249.867796] env[62816]: DEBUG nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating ProviderTree inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 2249.868015] env[62816]: DEBUG nova.compute.provider_tree [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 161, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2249.877503] env[62816]: DEBUG nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Refreshing aggregate associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, aggregates: None {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 2249.893826] env[62816]: DEBUG nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] 
Refreshing trait associations for resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa, traits: COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,HW_ARCH_X86_64 {{(pid=62816) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 2249.924789] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c6f0c5-3f8a-4b3a-968e-abc67cd71075 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.932248] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1e83b1-a82a-4dc5-99bb-c86f75ea77ad {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.961927] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b1d394-2886-486f-997f-4208265561c3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.968863] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871cc915-be28-4689-b195-6ae917e107f0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2249.981207] env[62816]: DEBUG nova.compute.provider_tree [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2250.510659] env[62816]: DEBUG nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updated inventory for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 2250.510941] env[62816]: DEBUG nova.compute.provider_tree [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating resource provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa generation from 185 to 186 during operation: update_inventory {{(pid=62816) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2250.511146] env[62816]: DEBUG nova.compute.provider_tree [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Updating inventory in 
ProviderTree for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2251.016106] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.298s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.018679] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 2.058s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.018873] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.019034] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62816) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2251.019875] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c93b9fe-f002-46e9-85ee-a14937c78302 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.027910] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b18e3a-baca-4a8d-9dc7-67cc57e9631a {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.041691] env[62816]: INFO nova.scheduler.client.report [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance 7331a0d0-7cd1-4627-93bd-c7680c5ff66c [ 2251.043089] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d5ccff-55b5-428d-88f9-f190e307016c {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.051576] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a5de80-eadc-444a-bec0-402624bb56e2 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.081233] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181159MB free_disk=162GB 
free_vcpus=48 pci_devices=None {{(pid=62816) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2251.081696] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.081696] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.551342] env[62816]: DEBUG oslo_concurrency.lockutils [None req-0fa3e17f-ee0b-42bb-b1a6-d638a9af2ba0 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "7331a0d0-7cd1-4627-93bd-c7680c5ff66c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.415s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.957091] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "30884afd-63d4-4a08-a59a-a9dcb4269dba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.957419] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.957570] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "30884afd-63d4-4a08-a59a-a9dcb4269dba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2251.957759] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2251.957931] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2251.960172] env[62816]: INFO 
nova.compute.manager [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Terminating instance [ 2251.961869] env[62816]: DEBUG nova.compute.manager [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Start destroying the instance on the hypervisor. {{(pid=62816) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2251.962098] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Destroying instance {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2251.962916] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe8491a-ee5b-48fc-ad47-40d2b23055d3 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.971080] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Powering off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2251.971301] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9935045f-99a5-49b0-980a-3e40fe34f8ce {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.976935] env[62816]: DEBUG oslo_vmware.api [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2251.976935] env[62816]: value = "task-1789563" [ 2251.976935] env[62816]: _type = "Task" [ 2251.976935] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.984061] env[62816]: DEBUG oslo_vmware.api [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.104266] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Instance 30884afd-63d4-4a08-a59a-a9dcb4269dba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62816) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2252.104618] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2252.104618] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62816) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2252.131689] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36239993-eb74-44f9-874b-aaf5527c1812 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.139101] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cad20d-803f-449a-a4cd-6968ea34bb5b {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.168246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96f9b38-ec09-4999-915c-b043a73f7ef1 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.175126] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6360a837-d247-4d41-a8f1-f6329aa05089 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.188649] env[62816]: DEBUG nova.compute.provider_tree [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2252.486932] env[62816]: DEBUG oslo_vmware.api [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789563, 'name': PowerOffVM_Task, 'duration_secs': 0.184997} completed successfully. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.487259] env[62816]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Powered off the VM {{(pid=62816) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2252.487440] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Unregistering the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2252.487670] env[62816]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2beeaa41-0783-466f-91ee-2a46809d6e21 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.560913] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Unregistered the VM {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2252.561184] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Deleting contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2252.561343] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleting the datastore file [datastore1] 30884afd-63d4-4a08-a59a-a9dcb4269dba {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2252.561598] env[62816]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a19d8124-4a15-4ce3-8c15-2894fdb0c0a0 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.567276] env[62816]: DEBUG oslo_vmware.api [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for the task: (returnval){ [ 2252.567276] env[62816]: value = "task-1789565" [ 2252.567276] env[62816]: _type = "Task" [ 2252.567276] env[62816]: } to complete. {{(pid=62816) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.574598] env[62816]: DEBUG oslo_vmware.api [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789565, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.691857] env[62816]: DEBUG nova.scheduler.client.report [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2253.077584] env[62816]: DEBUG oslo_vmware.api [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Task: {'id': task-1789565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169186} completed successfully. {{(pid=62816) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.077847] env[62816]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted the datastore file {{(pid=62816) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2253.078047] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Deleted contents of the VM from datastore datastore1 {{(pid=62816) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2253.078237] env[62816]: DEBUG nova.virt.vmwareapi.vmops [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Instance destroyed {{(pid=62816) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2253.078414] env[62816]: INFO nova.compute.manager [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2253.078653] env[62816]: DEBUG oslo.service.loopingcall [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62816) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2253.078847] env[62816]: DEBUG nova.compute.manager [-] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Deallocating network for instance {{(pid=62816) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2253.078941] env[62816]: DEBUG nova.network.neutron [-] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] deallocate_for_instance() {{(pid=62816) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2253.197249] env[62816]: DEBUG nova.compute.resource_tracker [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62816) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2253.197570] env[62816]: DEBUG oslo_concurrency.lockutils [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2253.316436] env[62816]: DEBUG nova.compute.manager [req-f622a5ad-788c-476f-9218-bea49f1d5876 req-b1bebc81-bbd5-45f7-90bb-399b3bb4ec43 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Received event network-vif-deleted-465cd9c4-6d8e-4837-8b90-d36e77571bb6 {{(pid=62816) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2253.316641] env[62816]: INFO nova.compute.manager [req-f622a5ad-788c-476f-9218-bea49f1d5876 req-b1bebc81-bbd5-45f7-90bb-399b3bb4ec43 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Neutron deleted interface 465cd9c4-6d8e-4837-8b90-d36e77571bb6; detaching it from the instance and deleting it from the info cache [ 2253.316819] env[62816]: DEBUG nova.network.neutron [req-f622a5ad-788c-476f-9218-bea49f1d5876 req-b1bebc81-bbd5-45f7-90bb-399b3bb4ec43 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2253.791765] env[62816]: DEBUG nova.network.neutron [-] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Updating instance_info_cache with network_info: [] {{(pid=62816) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2253.818918] env[62816]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3947eea0-34e0-4955-be31-14605ad85d0f {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.828726] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afc9b9f-cdbc-4044-8458-761f07236c62 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.850692] env[62816]: DEBUG nova.compute.manager [req-f622a5ad-788c-476f-9218-bea49f1d5876 req-b1bebc81-bbd5-45f7-90bb-399b3bb4ec43 service nova] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Detach interface failed, port_id=465cd9c4-6d8e-4837-8b90-d36e77571bb6, reason: Instance 30884afd-63d4-4a08-a59a-a9dcb4269dba could not be found. 
{{(pid=62816) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2254.197941] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.198271] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.198364] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Starting heal instance info cache {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2254.295055] env[62816]: INFO nova.compute.manager [-] [instance: 30884afd-63d4-4a08-a59a-a9dcb4269dba] Took 1.22 seconds to deallocate network for instance. [ 2254.701591] env[62816]: DEBUG nova.compute.manager [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Didn't find any instances for network info cache update. {{(pid=62816) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2254.701863] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.701978] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.702152] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2254.801082] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2254.801346] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2254.801570] env[62816]: DEBUG nova.objects.instance [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lazy-loading 'resources' on Instance uuid 30884afd-63d4-4a08-a59a-a9dcb4269dba {{(pid=62816) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2255.337794] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-14dd42ac-5c55-4304-a15c-ea680392f565 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.345246] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505814c6-0b9c-488c-9a61-fc534d680149 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.375171] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baad07d9-78bb-4194-9d06-6b0b3b2734f6 {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.382107] env[62816]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96b2b8d-19ea-4d9e-9beb-a23c76f5711d {{(pid=62816) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.394698] env[62816]: DEBUG nova.compute.provider_tree [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed in ProviderTree for provider: 27f49c85-1bb9-4d17-a914-e2f45a5e84fa {{(pid=62816) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2255.897661] env[62816]: DEBUG nova.scheduler.client.report [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Inventory has not changed for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 162, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62816) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2256.402747] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.420786] env[62816]: INFO nova.scheduler.client.report [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Deleted allocations for instance 30884afd-63d4-4a08-a59a-a9dcb4269dba [ 2256.456681] env[62816]: DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2256.929077] env[62816]: DEBUG oslo_concurrency.lockutils [None req-f0a682b9-6054-4d57-8a75-0ae125f9bfe3 tempest-ServersTestJSON-1950930328 tempest-ServersTestJSON-1950930328-project-member] Lock "30884afd-63d4-4a08-a59a-a9dcb4269dba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.971s {{(pid=62816) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2256.960291] env[62816]: 
DEBUG oslo_service.periodic_task [None req-7f4dcdbb-e427-4900-917d-ad2ae785de98 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62816) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
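Note: earlier in this trace the inventory update for provider 27f49c85-1bb9-4d17-a914-e2f45a5e84fa is rejected with HTTP 409 "placement.concurrent_update" because the cached resource provider generation is stale; the report client then refreshes inventories, aggregates and traits and retries, after which the generation moves from 185 to 186. A hedged sketch of that optimistic-concurrency loop, where put_inventory and get_provider are hypothetical stand-ins rather than the Nova report client API:

```python
# Illustrative sketch of the generation-based retry visible in the placement
# exchange above: send the cached provider generation with the inventory
# update, and on a 409 "concurrent update" refresh the provider view and
# retry. put_inventory() and get_provider() are assumed helpers, not real APIs.
def set_inventory_with_retry(put_inventory, get_provider, provider_uuid,
                             inventory, max_attempts=3):
    provider = get_provider(provider_uuid)           # e.g. {'generation': 185}
    for _ in range(max_attempts):
        status, body = put_inventory(
            provider_uuid,
            generation=provider["generation"],       # stale value triggers 409
            inventories=inventory,
        )
        if status == 200:
            return body                               # new generation comes back
        if status == 409 and body.get("code") == "placement.concurrent_update":
            provider = get_provider(provider_uuid)    # refresh, then retry
            continue
        raise RuntimeError("inventory update failed: %s %s" % (status, body))
    raise RuntimeError("gave up after %d conflicting updates" % max_attempts)
```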
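Note: the lock lines throughout this trace ("Acquiring lock ... by ...", "acquired ... waited N s", "released ... held N s") come from oslo_concurrency.lockutils. The following is a simplified illustration of that acquire/wait/held bookkeeping, not the lockutils implementation; the registry and print-based logging are assumptions made for the sketch:

```python
# Rough sketch of the waited/held timing that lockutils logs around each
# named lock in the trace above. Simplified illustration only.
import threading
import time
from contextlib import contextmanager

_locks = {}
_registry_guard = threading.Lock()


@contextmanager
def timed_lock(name, owner):
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    print('Acquiring lock "%s" by "%s"' % (name, owner))
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, waited))
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print('Lock "%s" "released" by "%s" :: held %.3fs' % (name, owner, held))
```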